text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestIgnoreHealthError(t *testing.T) { // This test verify the tablet health by Ignoring the error // For this case we need a healthy tablet in a shard without any master. // When we try to make a connection to such tablet we get "no slave status" error. // We will then ignore this error and verify if the status report the tablet as Healthy. // Create a new shard defer cluster.PanicHandler(t) newShard := &cluster.Shard{ Name: "1", } // Start mysql process tablet := clusterInstance.GetVttabletInstance("replica", 0, "") tablet.MysqlctlProcess = *cluster.MysqlCtlProcessInstance(tablet.TabletUID, tablet.MySQLPort, clusterInstance.TmpDirectory) err := tablet.MysqlctlProcess.Start() require.Nil(t, err) // start vttablet process tablet.VttabletProcess = cluster.VttabletProcessInstance(tablet.HTTPPort, tablet.GrpcPort, tablet.TabletUID, clusterInstance.Cell, newShard.Name, clusterInstance.Keyspaces[0].Name, clusterInstance.VtctldProcess.Port, tablet.Type, clusterInstance.TopoProcess.Port, clusterInstance.Hostname, clusterInstance.TmpDirectory, clusterInstance.VtTabletExtraArgs, clusterInstance.EnableSemiSync) tablet.Alias = tablet.VttabletProcess.TabletPath newShard.Vttablets = append(newShard.Vttablets, tablet) clusterInstance.Keyspaces[0].Shards = append(clusterInstance.Keyspaces[0].Shards, *newShard) // Init Tablet err = clusterInstance.VtctlclientProcess.InitTablet(tablet, cell, keyspaceName, hostname, newShard.Name) require.Nil(t, err) // create database err = tablet.VttabletProcess.CreateDB(keyspaceName) require.Nil(t, err) // Start Vttablet, it should be NOT_SERVING as there is no master err = clusterInstance.StartVttablet(tablet, "NOT_SERVING", false, cell, keyspaceName, hostname, newShard.Name) require.Nil(t, err) // Force it healthy. 
err = clusterInstance.VtctlclientProcess.ExecuteCommand("IgnoreHealthError", tablet.Alias, ".*no slave status.*") require.Nil(t, err) err = clusterInstance.VtctlclientProcess.ExecuteCommand("RunHealthCheck", tablet.Alias) require.Nil(t, err) err = tablet.VttabletProcess.WaitForTabletType("SERVING") require.Nil(t, err) checkHealth(t, tablet.HTTPPort, false) // Turn off the force-healthy. err = clusterInstance.VtctlclientProcess.ExecuteCommand("IgnoreHealthError", tablet.Alias, "") require.Nil(t, err) err = clusterInstance.VtctlclientProcess.ExecuteCommand("RunHealthCheck", tablet.Alias) require.Nil(t, err) err = tablet.VttabletProcess.WaitForTabletType("NOT_SERVING") require.Nil(t, err) checkHealth(t, tablet.HTTPPort, true) // Tear down custom processes killTablets(t, tablet) }
explode_data.jsonl/71231
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 921 }
[ 2830, 3393, 12497, 14542, 1454, 1155, 353, 8840, 836, 8, 341, 197, 322, 1096, 1273, 10146, 279, 20697, 2820, 553, 33781, 5503, 279, 1465, 198, 197, 322, 1752, 419, 1142, 582, 1184, 264, 9314, 20697, 304, 264, 52069, 2041, 894, 7341, 6...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetAppWithReorderedMiddleware(t *testing.T) { app = nil MiddlewareStack = []negroni.Handler{ negroni.Handler(negroni.HandlerFunc(customMiddleware)), } r := mux.NewRouter() r.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {}) n := GetApp() n.UseHandler(r) server := httptest.NewServer(n) defer server.Close() resp, err := http.Get(server.URL) if err != nil { t.Fatal("expected run without errors but was", err.Error()) } body, err := ioutil.ReadAll(resp.Body) if err != nil { t.Fatal("expected run without errors but was", err.Error()) } defer resp.Body.Close() if !strings.Contains(string(body), "Calling custom middleware") { t.Error("do not contains 'Calling custom middleware'") } if !strings.Contains(resp.Header.Get("Content-Type"), "application/json") { t.Error("content type should application/json but wasn't") } MiddlewareStack = []negroni.Handler{} }
explode_data.jsonl/51057
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 333 }
[ 2830, 3393, 1949, 2164, 2354, 693, 10544, 24684, 1155, 353, 8840, 836, 8, 341, 28236, 284, 2092, 198, 9209, 11603, 4336, 284, 3056, 28775, 2248, 72, 31010, 515, 197, 9038, 791, 2248, 72, 31010, 1445, 791, 2248, 72, 89164, 61301, 24684, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTransformTypes(t *testing.T) { tests := []struct { commonField common.Field expected interface{} }{ {commonField: common.Field{}, expected: "string"}, {commonField: common.Field{Type: "half_float"}, expected: "number"}, {commonField: common.Field{Type: "scaled_float"}, expected: "number"}, {commonField: common.Field{Type: "float"}, expected: "number"}, {commonField: common.Field{Type: "integer"}, expected: "number"}, {commonField: common.Field{Type: "long"}, expected: "number"}, {commonField: common.Field{Type: "short"}, expected: "number"}, {commonField: common.Field{Type: "byte"}, expected: "number"}, {commonField: common.Field{Type: "keyword"}, expected: "string"}, {commonField: common.Field{Type: "text"}, expected: "string"}, {commonField: common.Field{Type: "string"}, expected: nil}, {commonField: common.Field{Type: "date"}, expected: "date"}, {commonField: common.Field{Type: "geo_point"}, expected: "geo_point"}, {commonField: common.Field{Type: "invalid"}, expected: nil}, } for idx, test := range tests { trans, _ := newFieldsTransformer(version, common.Fields{test.commonField}) transformed, err := trans.transform() assert.NoError(t, err) out := transformed["fields"].([]common.MapStr)[0] assert.Equal(t, test.expected, out["type"], fmt.Sprintf("Failed for idx %v", idx)) } }
explode_data.jsonl/37634
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 462 }
[ 2830, 3393, 8963, 4173, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 83825, 1877, 4185, 17087, 198, 197, 42400, 262, 3749, 16094, 197, 59403, 197, 197, 90, 5464, 1877, 25, 4185, 17087, 22655, 3601, 25, 330, 917, 71...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestHandleConn(t *testing.T) { s := &Server{} go func() { err := s.Listen() if err != nil { log.Fatal(err) } }() // Instantiates a new Client c := &Client{} // Creates a new MQTT CONNECT message and sets the proper parameters msg := message.NewConnectMessage() msg.SetWillQos(1) msg.SetVersion(4) msg.SetCleanSession(true) msg.SetClientId([]byte("surgemq")) msg.SetKeepAlive(10) msg.SetWillTopic([]byte("will")) msg.SetWillMessage([]byte("send me home")) msg.SetUsername([]byte("surgemq")) msg.SetPassword([]byte("verysecret")) // Connects to the remote server at 127.0.0.1 port 1883 c.Connect("tcp://127.0.0.1:1883", msg) // Creates a new SUBSCRIBE message to subscribe to topic "abc" submsg := message.NewSubscribeMessage() submsg.AddTopic([]byte("abc"), 0) // Subscribes to the topic by sending the message. The first nil in the function // call is a OnCompleteFunc that should handle the SUBACK message from the server. // Nil means we are ignoring the SUBACK messages. The second nil should be a // OnPublishFunc that handles any messages send to the client because of this // subscription. Nil means we are ignoring any PUBLISH messages for this topic. c.Subscribe(submsg, nil, nil) // Creates a new PUBLISH message with the appropriate contents for publishing pubmsg := message.NewPublishMessage() pubmsg.SetPacketId(pktid) pubmsg.SetTopic([]byte("abc")) pubmsg.SetPayload(make([]byte, 1024)) pubmsg.SetQoS(qos) // Publishes to the server by sending the message c.Publish(pubmsg, nil) // Disconnects from the server c.Disconnect() }
explode_data.jsonl/7608
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 546 }
[ 2830, 3393, 6999, 9701, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 609, 5475, 31483, 30680, 2915, 368, 341, 197, 9859, 1669, 274, 68334, 741, 197, 743, 1848, 961, 2092, 341, 298, 6725, 26133, 3964, 340, 197, 197, 532, 197, 69826, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestMmap(t *testing.T) { b, err := unix.Mmap(-1, 0, unix.Getpagesize(), unix.PROT_NONE, unix.MAP_ANON|unix.MAP_PRIVATE) if err != nil { t.Fatalf("Mmap: %v", err) } if err := unix.Munmap(b); err != nil { t.Fatalf("Munmap: %v", err) } }
explode_data.jsonl/68210
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 127 }
[ 2830, 3393, 44, 2186, 1155, 353, 8840, 836, 8, 341, 2233, 11, 1848, 1669, 51866, 1321, 2186, 4080, 16, 11, 220, 15, 11, 51866, 2234, 84917, 1507, 51866, 29687, 51, 14904, 11, 51866, 1321, 2537, 23615, 711, 91, 56646, 1321, 2537, 30470...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestFindManifests_Exclude_NothingMatches(t *testing.T) { objs, err := findManifests("testdata/app-include-exclude", ".", nil, argoappv1.ApplicationSourceDirectory{ Recurse: true, Exclude: "nothing.yaml", }, map[string]bool{}) if !assert.NoError(t, err) || !assert.Len(t, objs, 2) { return } assert.ElementsMatch(t, []string{"nginx-deployment", "nginx-deployment-sub"}, []string{objs[0].GetName(), objs[1].GetName()}) }
explode_data.jsonl/5702
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 176 }
[ 2830, 3393, 9885, 38495, 82, 62531, 857, 36989, 1596, 42470, 1155, 353, 8840, 836, 8, 341, 22671, 82, 11, 1848, 1669, 1477, 38495, 82, 445, 92425, 10640, 12, 997, 10187, 857, 497, 68514, 2092, 11, 1392, 78, 676, 85, 16, 17521, 3608, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestErrorPropagation(t *testing.T) { createSandbox(KsTestUnsharded) hcVTGateTest.Reset() sbcm := hcVTGateTest.AddTestTablet("aa", "1.1.1.1", 1001, KsTestUnsharded, "0", topodatapb.TabletType_MASTER, true, 1, nil) sbcrdonly := hcVTGateTest.AddTestTablet("aa", "1.1.1.2", 1001, KsTestUnsharded, "0", topodatapb.TabletType_RDONLY, true, 1, nil) sbcs := []*sandboxconn.SandboxConn{ sbcm, sbcrdonly, } // ErrorCode_CANCELLED testErrorPropagation(t, sbcs, func(sbc *sandboxconn.SandboxConn) { sbc.MustFailCanceled = 20 }, func(sbc *sandboxconn.SandboxConn) { sbc.MustFailCanceled = 0 }, vtrpcpb.ErrorCode_CANCELLED) // ErrorCode_UNKNOWN_ERROR testErrorPropagation(t, sbcs, func(sbc *sandboxconn.SandboxConn) { sbc.MustFailUnknownError = 20 }, func(sbc *sandboxconn.SandboxConn) { sbc.MustFailUnknownError = 0 }, vtrpcpb.ErrorCode_UNKNOWN_ERROR) // ErrorCode_BAD_INPUT testErrorPropagation(t, sbcs, func(sbc *sandboxconn.SandboxConn) { sbc.MustFailServer = 20 }, func(sbc *sandboxconn.SandboxConn) { sbc.MustFailServer = 0 }, vtrpcpb.ErrorCode_BAD_INPUT) // ErrorCode_DEADLINE_EXCEEDED testErrorPropagation(t, sbcs, func(sbc *sandboxconn.SandboxConn) { sbc.MustFailDeadlineExceeded = 20 }, func(sbc *sandboxconn.SandboxConn) { sbc.MustFailDeadlineExceeded = 0 }, vtrpcpb.ErrorCode_DEADLINE_EXCEEDED) // ErrorCode_INTEGRITY_ERROR testErrorPropagation(t, sbcs, func(sbc *sandboxconn.SandboxConn) { sbc.MustFailIntegrityError = 20 }, func(sbc *sandboxconn.SandboxConn) { sbc.MustFailIntegrityError = 0 }, vtrpcpb.ErrorCode_INTEGRITY_ERROR) // ErrorCode_PERMISSION_DENIED testErrorPropagation(t, sbcs, func(sbc *sandboxconn.SandboxConn) { sbc.MustFailPermissionDenied = 20 }, func(sbc *sandboxconn.SandboxConn) { sbc.MustFailPermissionDenied = 0 }, vtrpcpb.ErrorCode_PERMISSION_DENIED) // ErrorCode_RESOURCE_EXHAUSTED testErrorPropagation(t, sbcs, func(sbc *sandboxconn.SandboxConn) { sbc.MustFailTxPool = 20 }, func(sbc *sandboxconn.SandboxConn) { sbc.MustFailTxPool = 0 }, 
vtrpcpb.ErrorCode_RESOURCE_EXHAUSTED) // ErrorCode_QUERY_NOT_SERVED testErrorPropagation(t, sbcs, func(sbc *sandboxconn.SandboxConn) { sbc.MustFailRetry = 20 }, func(sbc *sandboxconn.SandboxConn) { sbc.MustFailRetry = 0 }, vtrpcpb.ErrorCode_QUERY_NOT_SERVED) // ErrorCode_NOT_IN_TX testErrorPropagation(t, sbcs, func(sbc *sandboxconn.SandboxConn) { sbc.MustFailNotTx = 20 }, func(sbc *sandboxconn.SandboxConn) { sbc.MustFailNotTx = 0 }, vtrpcpb.ErrorCode_NOT_IN_TX) // ErrorCode_INTERNAL_ERROR testErrorPropagation(t, sbcs, func(sbc *sandboxconn.SandboxConn) { sbc.MustFailFatal = 20 }, func(sbc *sandboxconn.SandboxConn) { sbc.MustFailFatal = 0 }, vtrpcpb.ErrorCode_INTERNAL_ERROR) // ErrorCode_TRANSIENT_ERROR testErrorPropagation(t, sbcs, func(sbc *sandboxconn.SandboxConn) { sbc.MustFailTransientError = 20 }, func(sbc *sandboxconn.SandboxConn) { sbc.MustFailTransientError = 0 }, vtrpcpb.ErrorCode_TRANSIENT_ERROR) // ErrorCode_UNAUTHENTICATED testErrorPropagation(t, sbcs, func(sbc *sandboxconn.SandboxConn) { sbc.MustFailUnauthenticated = 20 }, func(sbc *sandboxconn.SandboxConn) { sbc.MustFailUnauthenticated = 0 }, vtrpcpb.ErrorCode_UNAUTHENTICATED) }
explode_data.jsonl/7852
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1405 }
[ 2830, 3393, 1454, 35172, 1155, 353, 8840, 836, 8, 341, 39263, 50, 31536, 16738, 82, 2271, 1806, 927, 20958, 340, 9598, 66, 20457, 42318, 2271, 36660, 741, 24842, 6226, 1669, 50394, 20457, 42318, 2271, 1904, 2271, 2556, 83, 445, 5305, 49...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestHtlcIncomingResolverFwdPreimageKnown(t *testing.T) { t.Parallel() defer timeout(t)() ctx := newIncomingResolverTestContext(t, false) ctx.witnessBeacon.lookupPreimage[testResHash] = testResPreimage ctx.resolve() ctx.waitForResult(true) }
explode_data.jsonl/30609
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 96 }
[ 2830, 3393, 39, 11544, 66, 97564, 18190, 37, 6377, 4703, 1805, 48206, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 16867, 9632, 1155, 8, 2822, 20985, 1669, 501, 97564, 18190, 2271, 1972, 1155, 11, 895, 340, 20985, 1418, 8091, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCollectUint8(t *testing.T) { v := &Value{data: []uint8{uint8(1), uint8(1), uint8(1), uint8(1), uint8(1), uint8(1)}} collected := v.CollectUint8(func(index int, val uint8) interface{} { return index }) collectedArr := collected.MustInterSlice() if assert.Equal(t, 6, len(collectedArr)) { assert.Equal(t, collectedArr[0], 0) assert.Equal(t, collectedArr[1], 1) assert.Equal(t, collectedArr[2], 2) assert.Equal(t, collectedArr[3], 3) assert.Equal(t, collectedArr[4], 4) assert.Equal(t, collectedArr[5], 5) } }
explode_data.jsonl/23471
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 239 }
[ 2830, 3393, 47504, 21570, 23, 1155, 353, 8840, 836, 8, 1476, 5195, 1669, 609, 1130, 90, 691, 25, 3056, 2496, 23, 90, 2496, 23, 7, 16, 701, 2622, 23, 7, 16, 701, 2622, 23, 7, 16, 701, 2622, 23, 7, 16, 701, 2622, 23, 7, 16, 70...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNamespaceSnapshotNotBootstrapped(t *testing.T) { ctrl := xtest.NewController(t) defer ctrl.Finish() ctx := context.NewBackground() defer ctx.Close() ns, close := newTestNamespace(t) defer close() ns.bootstrapState = Bootstrapping blockSize := ns.Options().RetentionOptions().BlockSize() blockStart := xtime.Now().Truncate(blockSize) require.Equal(t, errNamespaceNotBootstrapped, ns.Snapshot(blockStart, blockStart, nil)) }
explode_data.jsonl/35359
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 154 }
[ 2830, 3393, 22699, 15009, 2623, 17919, 495, 5677, 1155, 353, 8840, 836, 8, 341, 84381, 1669, 856, 1944, 7121, 2051, 1155, 340, 16867, 23743, 991, 18176, 2822, 20985, 1669, 2266, 7121, 8706, 741, 16867, 5635, 10421, 2822, 84041, 11, 3265, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEnabledByDefault(t *testing.T) { // Events brokerEvent := Eventf(corev1.EventTypeNormal, "BrokerCreated", "Default eventing.knative.dev Broker %q created.", "default") // Objects broker := resources.MakeBroker(testNS, resources.DefaultBrokerName) table := TableTest{{ Name: "bad workqueue key", // Make sure Reconcile handles bad keys. Key: "too/many/parts", }, { Name: "key not found", // Make sure Reconcile handles good keys that don't exist. Key: "foo/not-found", }, { Name: "Trigger is not labeled", Objects: []runtime.Object{ NewTrigger(triggerName, testNS, brokerName), }, Key: testNS + "/" + triggerName, SkipNamespaceValidation: true, WantErr: false, WantEvents: []string{ brokerEvent, }, WantCreates: []runtime.Object{ broker, }, }, { Name: "Trigger is labeled disabled", Objects: []runtime.Object{ NewNamespace(testNS, WithNamespaceLabeled(sugar.InjectionDisabledLabels())), }, Key: testNS + "/" + triggerName, }, { Name: "Trigger is deleted no resources", Objects: []runtime.Object{ NewTrigger(triggerName, testNS, brokerName, WithAnnotation(sugar.DeprecatedInjectionLabelKey, sugar.InjectionEnabledLabelValue), WithTriggerDeleted), }, Key: testNS + "/" + triggerName, }, { Name: "Trigger enabled", Objects: []runtime.Object{ NewTrigger(triggerName, testNS, brokerName, WithAnnotation(sugar.DeprecatedInjectionLabelKey, sugar.InjectionEnabledLabelValue)), }, Key: testNS + "/" + triggerName, SkipNamespaceValidation: true, WantErr: false, WantEvents: []string{ brokerEvent, }, WantCreates: []runtime.Object{ broker, }, }, { Name: "Trigger enabled, broker exists", Objects: []runtime.Object{ NewTrigger(triggerName, testNS, brokerName, WithAnnotation(sugar.DeprecatedInjectionLabelKey, sugar.InjectionEnabledLabelValue), ), resources.MakeBroker(testNS, resources.DefaultBrokerName), }, Key: testNS + "/" + triggerName, SkipNamespaceValidation: true, WantErr: false, }, { Name: "Trigger enabled, broker exists with no label", Objects: []runtime.Object{ NewTrigger(triggerName, 
testNS, brokerName, WithAnnotation(sugar.DeprecatedInjectionLabelKey, sugar.InjectionDisabledLabelValue)), &v1beta1.Broker{ ObjectMeta: metav1.ObjectMeta{ Namespace: testNS, Name: resources.DefaultBrokerName, }, }, }, Key: testNS + "/" + triggerName, SkipNamespaceValidation: true, WantErr: false, }} logger := logtesting.TestLogger(t) table.Test(t, MakeFactory(func(ctx context.Context, listers *Listers, cmw configmap.Watcher) controller.Reconciler { r := &Reconciler{ eventingClientSet: fakeeventingclient.Get(ctx), brokerLister: listers.GetBrokerLister(), isEnabled: sugar.OnByDefault, } return trigger.NewReconciler(ctx, logger, fakeeventingclient.Get(ctx), listers.GetTriggerLister(), controller.GetEventRecorder(ctx), r, controller.Options{SkipStatusUpdates: true}) }, false, logger)) }
explode_data.jsonl/35291
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1281 }
[ 2830, 3393, 5462, 1359, 3675, 1155, 353, 8840, 836, 8, 341, 197, 322, 17627, 198, 2233, 45985, 1556, 1669, 3665, 69, 47867, 85, 16, 89879, 12206, 11, 330, 65545, 11694, 497, 330, 3675, 1538, 287, 5202, 29738, 21523, 52701, 1018, 80, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDispatchQuota_Failure(t *testing.T) { h := &mockHandler{err: errors.New("handler is apathetic to your demands")} a := adapter.QuotaArgs{BestEffort: true, DeduplicationID: "dedupe", QuotaAmount: 54} _, err := executeDispatchQuota(t, h, a) if err == nil { t.Fatal("expected error not found") } }
explode_data.jsonl/64996
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 114 }
[ 2830, 3393, 11283, 2183, 6089, 1400, 9373, 1155, 353, 8840, 836, 8, 341, 9598, 1669, 609, 16712, 3050, 90, 615, 25, 5975, 7121, 445, 17905, 374, 1443, 587, 5298, 311, 697, 18154, 899, 630, 11323, 1669, 12956, 33907, 6089, 4117, 90, 14...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestPageWithEmbeddedScriptTag(t *testing.T) { t.Parallel() assertFunc := func(t *testing.T, ext string, pages page.Pages) { p := pages[0] if ext == "ad" || ext == "rst" { // TOD(bep) return } checkPageContent(t, p, "<script type='text/javascript'>alert('the script tags are still there, right?');</script>\n", ext) } testAllMarkdownEnginesForPages(t, assertFunc, nil, simplePageWithEmbeddedScript) }
explode_data.jsonl/60614
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 161 }
[ 2830, 3393, 2665, 2354, 83466, 5910, 5668, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 6948, 9626, 1669, 2915, 1155, 353, 8840, 836, 11, 1303, 914, 11, 6816, 2150, 68829, 8, 341, 197, 3223, 1669, 6816, 58, 15, 921, 197, 74...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestVoteEncoding(t *testing.T) { vote := examplePrecommit() commitSig := vote.CommitSig() cdc := amino.NewCodec() bz1 := cdc.MustMarshalBinaryBare(vote) bz2 := cdc.MustMarshalBinaryBare(commitSig) assert.Equal(t, bz1, bz2) }
explode_data.jsonl/54530
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 104 }
[ 2830, 3393, 41412, 14690, 1155, 353, 8840, 836, 8, 341, 5195, 1272, 1669, 3110, 4703, 17413, 741, 197, 17413, 47246, 1669, 6910, 53036, 47246, 741, 1444, 7628, 1669, 41400, 7121, 36913, 741, 2233, 89, 16, 1669, 272, 7628, 50463, 55438, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAddProwPlugin(t *testing.T) { t.Parallel() o := TestOptions{} o.Setup() o.Repos = append(o.Repos, "test/repo2") err := o.AddProwPlugins() assert.NoError(t, err) cm, err := o.KubeClient.CoreV1().ConfigMaps(o.NS).Get("plugins", metav1.GetOptions{}) assert.NoError(t, err) pluginConfig := &plugins.Configuration{} yaml.Unmarshal([]byte(cm.Data["plugins.yaml"]), &pluginConfig) assert.Equal(t, "test/repo", pluginConfig.Approve[0].Repos[0]) assert.Equal(t, "test/repo2", pluginConfig.Approve[1].Repos[0]) }
explode_data.jsonl/70867
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 225 }
[ 2830, 3393, 2212, 47, 651, 11546, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 22229, 1669, 3393, 3798, 16094, 22229, 39820, 2822, 22229, 2817, 966, 284, 8737, 10108, 2817, 966, 11, 330, 1944, 10758, 5368, 17, 5130, 9859, 1669,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestScpGetLocal(t *testing.T) { t.Parallel() v := viper.New() fs := hugofs.NewMem(v) ps := helpers.FilePathSeparator tests := []struct { path string content []byte }{ {"testpath" + ps + "test.txt", []byte(`T€st Content 123 fOO,bar:foo%bAR`)}, {"FOo" + ps + "BaR.html", []byte(`FOo/BaR.html T€st Content 123`)}, {"трям" + ps + "трям", []byte(`T€st трям/трям Content 123`)}, {"은행", []byte(`T€st C은행ontent 123`)}, {"Банковский кассир", []byte(`Банковский кассир T€st Content 123`)}, } for _, test := range tests { r := bytes.NewReader(test.content) err := helpers.WriteToDisk(test.path, r, fs.Source) if err != nil { t.Error(err) } c, err := getLocal(test.path, fs.Source, v) if err != nil { t.Errorf("Error getting resource content: %s", err) } if !bytes.Equal(c, test.content) { t.Errorf("\nExpected: %s\nActual: %s\n", string(test.content), string(c)) } } }
explode_data.jsonl/69217
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 456 }
[ 2830, 3393, 3326, 79, 1949, 7319, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 5195, 1669, 95132, 7121, 741, 53584, 1669, 29784, 36081, 7121, 18816, 3747, 340, 35009, 1669, 30187, 8576, 1820, 16409, 271, 78216, 1669, 3056, 1235, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestExcludeBroadcast(t *testing.T) { s := stack.New([]string{ipv4.ProtocolName}, []string{udp.ProtocolName}, stack.Options{}) const defaultMTU = 65536 id, _ := channel.New(256, defaultMTU, "") if testing.Verbose() { id = sniffer.New(id) } if err := s.CreateNIC(1, id); err != nil { t.Fatalf("CreateNIC failed: %v", err) } if err := s.AddAddress(1, ipv4.ProtocolNumber, header.IPv4Broadcast); err != nil { t.Fatalf("AddAddress failed: %v", err) } if err := s.AddAddress(1, ipv4.ProtocolNumber, header.IPv4Any); err != nil { t.Fatalf("AddAddress failed: %v", err) } s.SetRouteTable([]tcpip.Route{{ Destination: "\x00\x00\x00\x00", Mask: "\x00\x00\x00\x00", Gateway: "", NIC: 1, }}) randomAddr := tcpip.FullAddress{NIC: 1, Addr: "\x0a\x00\x00\x01", Port: 53} var wq waiter.Queue t.Run("WithoutPrimaryAddress", func(t *testing.T) { ep, err := s.NewEndpoint(udp.ProtocolNumber, ipv4.ProtocolNumber, &wq) if err != nil { t.Fatal(err) } defer ep.Close() // Cannot connect using a broadcast address as the source. if err := ep.Connect(randomAddr); err != tcpip.ErrNoRoute { t.Errorf("got ep.Connect(...) = %v, want = %v", err, tcpip.ErrNoRoute) } // However, we can bind to a broadcast address to listen. if err := ep.Bind(tcpip.FullAddress{Addr: header.IPv4Broadcast, Port: 53, NIC: 1}, nil); err != nil { t.Errorf("Bind failed: %v", err) } }) t.Run("WithPrimaryAddress", func(t *testing.T) { ep, err := s.NewEndpoint(udp.ProtocolNumber, ipv4.ProtocolNumber, &wq) if err != nil { t.Fatal(err) } defer ep.Close() // Add a valid primary endpoint address, now we can connect. if err := s.AddAddress(1, ipv4.ProtocolNumber, "\x0a\x00\x00\x02"); err != nil { t.Fatalf("AddAddress failed: %v", err) } if err := ep.Connect(randomAddr); err != nil { t.Errorf("Connect failed: %v", err) } }) }
explode_data.jsonl/44393
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 825 }
[ 2830, 3393, 95239, 43362, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 5611, 7121, 10556, 917, 90, 42676, 19, 54096, 675, 2137, 3056, 917, 90, 31101, 54096, 675, 2137, 5611, 22179, 6257, 692, 4777, 1638, 8505, 52, 284, 220, 21, 20, 20, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func Test_hcsTask_KillExec_2ndExecID_Success(t *testing.T) { lt, _, second := setupTestHcsTask(t) err := lt.KillExec(context.TODO(), second.id, 0xf, false) if err != nil { t.Fatalf("should not have failed, got: %v", err) } if second.state != shimExecStateExited { t.Fatalf("2nd exec should be in exited state got: %v", second.state) } }
explode_data.jsonl/56375
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 139 }
[ 2830, 3393, 1523, 4837, 6262, 10102, 483, 10216, 62, 17, 303, 10216, 915, 87161, 1155, 353, 8840, 836, 8, 341, 197, 4832, 11, 8358, 2086, 1669, 6505, 2271, 39, 4837, 6262, 1155, 692, 9859, 1669, 25175, 11352, 483, 10216, 5378, 90988, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestGame(t *testing.T) { t.Run("GET /game returns 200", func(t *testing.T) { server := mustMakePlayerServer(t, &poker.StubPlayerStore{}, dummyGame) request := newGameRequest() response := httptest.NewRecorder() server.ServeHTTP(response, request) assertStatus(t, response, http.StatusOK) }) t.Run("start a game with 3 players, send some blind alerts down WS and declare Ruth the winner", func(t *testing.T) { wantedBlindAlert := "Blind is 100" winner := "Ruth" game := &GameSpy{BlindAlert: []byte(wantedBlindAlert)} server := httptest.NewServer(mustMakePlayerServer(t, dummyPlayerStore, game)) ws := mustDialWS(t, "ws"+strings.TrimPrefix(server.URL, "http")+"/ws") defer server.Close() defer ws.Close() writeWSMessage(t, ws, "3") writeWSMessage(t, ws, winner) assertGameStartedWith(t, game, 3) assertFinishCalledWith(t, game, winner) within(t, tenMS, func() { assertWebsocketGotMsg(t, ws, wantedBlindAlert) }) }) }
explode_data.jsonl/4851
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 366 }
[ 2830, 3393, 4868, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 3806, 608, 5804, 4675, 220, 17, 15, 15, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 41057, 1669, 1969, 8078, 4476, 5475, 1155, 11, 609, 79, 10451, 7758, 392, 4476, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestReconcile_InvalidPipelineRunNames(t *testing.T) { // TestReconcile_InvalidPipelineRunNames runs "Reconcile" on several PipelineRuns that have invalid names. // It verifies that reconcile fails, how it fails and which events are triggered. // Note that the code tested here is part of the genreconciler. invalidNames := []string{ "foo/test-pipeline-run-doesnot-exist", "test/invalidformat/t", } tcs := []struct { name string pipelineRun string }{ { name: "invalid-pipeline-run-shd-stop-reconciling", pipelineRun: invalidNames[0], }, { name: "invalid-pipeline-run-name-shd-stop-reconciling", pipelineRun: invalidNames[1], }, } for _, tc := range tcs { t.Run(tc.name, func(t *testing.T) { testAssets, cancel := getPipelineRunController(t, test.Data{}) defer cancel() c := testAssets.Controller err := c.Reconciler.Reconcile(testAssets.Ctx, tc.pipelineRun) // No reason to keep reconciling something that doesnt or can't exist if err != nil { t.Errorf("Did not expect to see error when reconciling invalid PipelineRun but saw %q", err) } }) } }
explode_data.jsonl/27279
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 452 }
[ 2830, 3393, 693, 40446, 457, 62, 7928, 34656, 6727, 7980, 1155, 353, 8840, 836, 8, 341, 197, 322, 3393, 693, 40446, 457, 62, 7928, 34656, 6727, 7980, 8473, 330, 693, 40446, 457, 1, 389, 3807, 40907, 73920, 429, 614, 8318, 5036, 624, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestIssue27236(t *testing.T) { store, clean := testkit.CreateMockStore(t) defer clean() tk := testkit.NewTestKit(t, store) tk.MustExec("use test;") row := tk.MustQuery(`select extract(hour_second from "-838:59:59.00");`) row.Check(testkit.Rows("-8385959")) tk.MustExec(`drop table if exists t`) tk.MustExec(`create table t(c1 varchar(100));`) tk.MustExec(`insert into t values('-838:59:59.00'), ('700:59:59.00');`) row = tk.MustQuery(`select extract(hour_second from c1) from t order by c1;`) row.Check(testkit.Rows("-8385959", "7005959")) }
explode_data.jsonl/65605
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 235 }
[ 2830, 3393, 42006, 17, 22, 17, 18, 21, 1155, 353, 8840, 836, 8, 341, 57279, 11, 4240, 1669, 1273, 8226, 7251, 11571, 6093, 1155, 340, 16867, 4240, 2822, 3244, 74, 1669, 1273, 8226, 7121, 2271, 7695, 1155, 11, 3553, 340, 3244, 74, 50...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPoolReleaseSizeMax(t *testing.T) { assert := assert.New(t) pool := NewDecoderPool(1) decoder := newMsgpackDecoder() // replace the internal buffer with a big one decoder.buf = bytes.NewBuffer(make([]byte, 1, maxBufferSize+1)) // a decoder is discarded and not reused when the buffer size is too big pool.Release(decoder) anotherDecoder := pool.Borrow("application/msgpack") assert.NotEqual(anotherDecoder, decoder) }
explode_data.jsonl/35768
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 142 }
[ 2830, 3393, 10551, 16077, 1695, 5974, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 340, 85273, 1669, 1532, 20732, 10551, 7, 16, 340, 197, 48110, 1669, 501, 6611, 4748, 20732, 2822, 197, 322, 8290, 279, 5306, 4147, 448, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestStackPush(t *testing.T) { stack := New() if actualValue := stack.Empty(); actualValue != true { t.Errorf("Got %v expected %v", actualValue, true) } stack.Push(1) stack.Push(2) stack.Push(3) if actualValue := stack.Values(); actualValue[0].(int) != 3 || actualValue[1].(int) != 2 || actualValue[2].(int) != 1 { t.Errorf("Got %v expected %v", actualValue, "[3,2,1]") } if actualValue := stack.Empty(); actualValue != false { t.Errorf("Got %v expected %v", actualValue, false) } if actualValue := stack.Size(); actualValue != 3 { t.Errorf("Got %v expected %v", actualValue, 3) } if actualValue, ok := stack.Peek(); actualValue != 3 || !ok { t.Errorf("Got %v expected %v", actualValue, 3) } }
explode_data.jsonl/26067
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 274 }
[ 2830, 3393, 4336, 16644, 1155, 353, 8840, 836, 8, 341, 48227, 1669, 1532, 741, 743, 5042, 1130, 1669, 5611, 11180, 2129, 5042, 1130, 961, 830, 341, 197, 3244, 13080, 445, 32462, 1018, 85, 3601, 1018, 85, 497, 5042, 1130, 11, 830, 340,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestMetaValues(t *testing.T) { type InternalSimple struct { APIVersion string `json:"apiVersion,omitempty" yaml:"apiVersion,omitempty"` Kind string `json:"kind,omitempty" yaml:"kind,omitempty"` TestString string `json:"testString" yaml:"testString"` } type ExternalSimple struct { APIVersion string `json:"apiVersion,omitempty" yaml:"apiVersion,omitempty"` Kind string `json:"kind,omitempty" yaml:"kind,omitempty"` TestString string `json:"testString" yaml:"testString"` } s := NewScheme() s.AddKnownTypeWithName("", "Simple", &InternalSimple{}) s.AddKnownTypeWithName("externalVersion", "Simple", &ExternalSimple{}) internalToExternalCalls := 0 externalToInternalCalls := 0 // Register functions to verify that scope.Meta() gets set correctly. err := s.AddConversionFuncs( func(in *InternalSimple, out *ExternalSimple, scope Scope) error { t.Logf("internal -> external") if e, a := "", scope.Meta().SrcVersion; e != a { t.Fatalf("Expected '%v', got '%v'", e, a) } if e, a := "externalVersion", scope.Meta().DestVersion; e != a { t.Fatalf("Expected '%v', got '%v'", e, a) } scope.Convert(&in.TestString, &out.TestString, 0) internalToExternalCalls++ return nil }, func(in *ExternalSimple, out *InternalSimple, scope Scope) error { t.Logf("external -> internal") if e, a := "externalVersion", scope.Meta().SrcVersion; e != a { t.Errorf("Expected '%v', got '%v'", e, a) } if e, a := "", scope.Meta().DestVersion; e != a { t.Fatalf("Expected '%v', got '%v'", e, a) } scope.Convert(&in.TestString, &out.TestString, 0) externalToInternalCalls++ return nil }, ) if err != nil { t.Fatalf("unexpected error: %v", err) } simple := &InternalSimple{ TestString: "foo", } s.Log(t) // Test Encode, Decode, and DecodeInto data, err := s.EncodeToVersion(simple, "externalVersion") if err != nil { t.Fatalf("unexpected error: %v", err) } t.Logf(string(data)) obj2, err := s.Decode(data) if err != nil { t.Fatalf("unexpected error: %v", err) } if _, ok := obj2.(*InternalSimple); !ok { t.Fatalf("Got wrong 
type") } if e, a := simple, obj2; !reflect.DeepEqual(e, a) { t.Errorf("Expected:\n %#v,\n Got:\n %#v", e, a) } obj3 := &InternalSimple{} if err := s.DecodeInto(data, obj3); err != nil { t.Fatalf("unexpected error: %v", err) } if e, a := simple, obj3; !reflect.DeepEqual(e, a) { t.Errorf("Expected:\n %#v,\n Got:\n %#v", e, a) } // Test Convert external := &ExternalSimple{} err = s.Convert(simple, external) if err != nil { t.Fatalf("Unexpected error: %v", err) } if e, a := simple.TestString, external.TestString; e != a { t.Errorf("Expected %v, got %v", e, a) } // Encode and Convert should each have caused an increment. if e, a := 2, internalToExternalCalls; e != a { t.Errorf("Expected %v, got %v", e, a) } // Decode and DecodeInto should each have caused an increment. if e, a := 2, externalToInternalCalls; e != a { t.Errorf("Expected %v, got %v", e, a) } }
explode_data.jsonl/34481
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1231 }
[ 2830, 3393, 12175, 6227, 1155, 353, 8840, 836, 8, 341, 13158, 15412, 16374, 2036, 341, 197, 197, 7082, 5637, 914, 1565, 2236, 2974, 2068, 5637, 20478, 1, 32246, 2974, 2068, 5637, 20478, 8805, 197, 197, 10629, 981, 914, 1565, 2236, 2974,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestCollectObjxMap(t *testing.T) { v := &Value{data: [](Map){(Map)(New(1)), (Map)(New(1)), (Map)(New(1)), (Map)(New(1)), (Map)(New(1)), (Map)(New(1))}} collected := v.CollectObjxMap(func(index int, val Map) interface{} { return index }) collectedArr := collected.MustInterSlice() if assert.Equal(t, 6, len(collectedArr)) { assert.Equal(t, collectedArr[0], 0) assert.Equal(t, collectedArr[1], 1) assert.Equal(t, collectedArr[2], 2) assert.Equal(t, collectedArr[3], 3) assert.Equal(t, collectedArr[4], 4) assert.Equal(t, collectedArr[5], 5) } }
explode_data.jsonl/23399
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 250 }
[ 2830, 3393, 47504, 5261, 87, 2227, 1155, 353, 8840, 836, 8, 1476, 5195, 1669, 609, 1130, 90, 691, 25, 39444, 2227, 6098, 7, 2227, 2376, 3564, 7, 16, 5731, 320, 2227, 2376, 3564, 7, 16, 5731, 320, 2227, 2376, 3564, 7, 16, 5731, 320...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetIntValue(t *testing.T) { client := createMockApolloConfig(120) defaultValue := 100000 //test default v := client.GetIntValue("joe", defaultValue) Assert(t, defaultValue, Equal(v)) //normal value v = client.GetIntValue("int", defaultValue) Assert(t, 1, Equal(v)) //error type v = client.GetIntValue("float", defaultValue) Assert(t, defaultValue, Equal(v)) }
explode_data.jsonl/17962
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 137 }
[ 2830, 3393, 85097, 1130, 1155, 353, 8840, 836, 8, 341, 25291, 1669, 1855, 11571, 95909, 2648, 7, 16, 17, 15, 340, 11940, 1130, 1669, 220, 16, 15, 15, 15, 15, 15, 271, 197, 322, 1944, 1638, 198, 5195, 1669, 2943, 43148, 1130, 445, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNamespaceNeedsFlushAllSuccess(t *testing.T) { ctrl := xtest.NewController(t) defer ctrl.Finish() var ( shards = sharding.NewShards([]uint32{0, 2, 4}, shard.Available) dopts = DefaultTestOptions() hashFn = func(identifier ident.ID) uint32 { return shards[0].ID() } ) metadata, err := namespace.NewMetadata(defaultTestNs1ID, defaultTestNs1Opts) require.NoError(t, err) shardSet, err := sharding.NewShardSet(shards, hashFn) require.NoError(t, err) ropts := metadata.Options().RetentionOptions() at := xtime.UnixNano(2 * ropts.RetentionPeriod()) dopts = dopts.SetClockOptions(dopts.ClockOptions().SetNowFn(func() time.Time { return at.ToTime() })) blockStart := retention.FlushTimeEnd(ropts, at) oNs, err := newDatabaseNamespace(metadata, namespace.NewRuntimeOptionsManager(metadata.ID().String()), shardSet, nil, nil, nil, dopts) require.NoError(t, err) ns := oNs.(*dbNamespace) for _, s := range shards { shard := NewMockdatabaseShard(ctrl) shard.EXPECT().ID().Return(s.ID()).AnyTimes() shard.EXPECT().FlushState(blockStart).Return(fileOpState{ WarmStatus: warmStatus{ DataFlushed: fileOpSuccess, }, }, nil).AnyTimes() ns.shards[s.ID()] = shard } assertNeedsFlush(t, ns, blockStart, blockStart, false) }
explode_data.jsonl/35370
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 500 }
[ 2830, 3393, 22699, 65064, 46874, 2403, 7188, 1155, 353, 8840, 836, 8, 341, 84381, 1669, 856, 1944, 7121, 2051, 1155, 340, 16867, 23743, 991, 18176, 2822, 2405, 2399, 197, 36196, 2347, 284, 557, 28410, 7121, 2016, 2347, 10556, 2496, 18, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAllGraphWeights(t *testing.T) { gr := tests.NewTestEdgeWeightedGraph(6, []tests.TestWeightedEdge{ {V1: 0, V2: 1, Weight: 1.2}, {V1: 1, V2: 2, Weight: 2.3}, {V1: 0, V2: 3, Weight: 3.1}, {V1: 3, V2: 2, Weight: 4.1}, {V1: 4, V2: 3, Weight: 5.1}, {V1: 4, V2: 5, Weight: 1.6}, {V1: 5, V2: 0, Weight: 2.2}, }) assert.Equal(t, []float64{1.2, 3.1, 2.2, 2.3, 4.1, 5.1, 1.6}, AllGraphWeights(gr)) assert.InDelta(t, 19.6, TotalGraphWeight(gr), 0.0001) }
explode_data.jsonl/51159
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 253 }
[ 2830, 3393, 2403, 11212, 55630, 1155, 353, 8840, 836, 8, 341, 90059, 1669, 7032, 7121, 2271, 11656, 8295, 291, 11212, 7, 21, 11, 3056, 23841, 8787, 8295, 291, 11656, 515, 197, 197, 90, 53, 16, 25, 220, 15, 11, 647, 17, 25, 220, 16...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMapProxy_GetWithNonSerializableKey(t *testing.T) { _, err := mp.Get(student{}) AssertErrorNotNil(t, err, "get did not return an error for nonserializable key") mp.Clear() }
explode_data.jsonl/57048
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 66 }
[ 2830, 3393, 2227, 16219, 13614, 2354, 8121, 29268, 1592, 1155, 353, 8840, 836, 8, 341, 197, 6878, 1848, 1669, 10490, 2234, 39004, 37790, 18017, 1454, 96144, 1155, 11, 1848, 11, 330, 455, 1521, 537, 470, 458, 1465, 369, 2477, 10182, 8335...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestPoliciesListV1(t *testing.T) { f := newFixture(t) put := newReqV1(http.MethodPut, "/policies/1", testMod) f.server.Handler.ServeHTTP(f.recorder, put) if f.recorder.Code != 200 { t.Fatalf("Expected success but got %v", f.recorder) } f.reset() list := newReqV1(http.MethodGet, "/policies", "") f.server.Handler.ServeHTTP(f.recorder, list) if f.recorder.Code != 200 { t.Fatalf("Expected success but got %v", f.recorder) } // var policies []*PolicyV1 var response types.PolicyListResponseV1 err := util.NewJSONDecoder(f.recorder.Body).Decode(&response) if err != nil { t.Fatalf("Expected policy list but got error: %v", err) } expected := []types.PolicyV1{ newPolicy("1", testMod), } if len(expected) != len(response.Result) { t.Fatalf("Expected %d policies but got: %v", len(expected), response.Result) } for i := range expected { if !expected[i].Equal(response.Result[i]) { t.Fatalf("Expected policies to be equal. Expected:\n\n%v\n\nGot:\n\n%v\n", expected, response.Result) } } }
explode_data.jsonl/79012
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 414 }
[ 2830, 3393, 47, 42038, 852, 53, 16, 1155, 353, 8840, 836, 8, 341, 1166, 1669, 501, 18930, 1155, 340, 45062, 1669, 501, 27234, 53, 16, 19886, 20798, 19103, 11, 3521, 79, 42038, 14, 16, 497, 1273, 4459, 340, 1166, 12638, 31010, 83535, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestInputParametersAsJson(t *testing.T) { controller := newController() wfcset := controller.wfclientset.ArgoprojV1alpha1().Workflows("") wf := unmarshalWF(inputParametersAsJson) wf, err := wfcset.Create(wf) assert.NoError(t, err) woc := newWorkflowOperationCtx(wf, controller) woc.operate() updatedWf, err := wfcset.Get(wf.Name, metav1.GetOptions{}) assert.NoError(t, err) found := false for _, node := range updatedWf.Status.Nodes { if node.Type == wfv1.NodeTypePod { expectedJson := `[{"name":"parameter1","value":"value1"},{"name":"parameter2","value":"value2"}]` assert.Equal(t, expectedJson, *node.Inputs.Parameters[0].Value) found = true } } assert.Equal(t, true, found) }
explode_data.jsonl/54367
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 285 }
[ 2830, 3393, 2505, 9706, 2121, 5014, 1155, 353, 8840, 836, 8, 341, 61615, 1669, 501, 2051, 741, 6692, 8316, 746, 1669, 6461, 1418, 69, 2972, 746, 18979, 45926, 73, 53, 16, 7141, 16, 1005, 6776, 38140, 445, 5130, 6692, 69, 1669, 650, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestUserCredentialsTwoFiles(t *testing.T) { if server.VERSION[0] == '1' { t.Skip() } ts := runTrustServer() defer ts.Shutdown() userJWTFile := createTmpFile(t, []byte(uJWT)) defer os.Remove(userJWTFile) userSeedFile := createTmpFile(t, uSeed) defer os.Remove(userSeedFile) url := fmt.Sprintf("nats://127.0.0.1:%d", TEST_PORT) nc, err := Connect(url, UserCredentials(userJWTFile, userSeedFile)) if err != nil { t.Fatalf("Expected to connect, got %v", err) } nc.Close() }
explode_data.jsonl/44925
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 211 }
[ 2830, 3393, 1474, 27025, 11613, 10809, 1155, 353, 8840, 836, 8, 341, 743, 3538, 31372, 58, 15, 60, 621, 364, 16, 6, 341, 197, 3244, 57776, 741, 197, 532, 57441, 1669, 1598, 45548, 5475, 741, 16867, 10591, 10849, 18452, 2822, 19060, 55...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestGetHistoricCandles(t *testing.T) { currencyPair, err := currency.NewPairFromString("BTC-USDT") if err != nil { t.Fatal(err) } startTime := time.Unix(1546300800, 0) end := time.Unix(1577836799, 0) _, err = b.GetHistoricCandles(context.Background(), currencyPair, asset.Spot, startTime, end, kline.OneDay) if err != nil { t.Error(err) } _, err = b.GetHistoricCandles(context.Background(), currencyPair, asset.Spot, startTime, end, kline.Interval(time.Hour*7)) if err == nil { t.Fatal("unexpected result") } }
explode_data.jsonl/76692
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 216 }
[ 2830, 3393, 1949, 48983, 292, 34, 20125, 1155, 353, 8840, 836, 8, 341, 1444, 5088, 12443, 11, 1848, 1669, 11413, 7121, 12443, 44491, 445, 59118, 32340, 10599, 1138, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, 532, 21375...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestDynamicSource_Bootstrap(t *testing.T) { config, stop := framework.RunControlPlane(t) defer stop() kubeClient, _, _, _ := framework.NewClients(t, config) namespace := "testns" ns := &corev1.Namespace{ObjectMeta: metav1.ObjectMeta{Name: namespace}} _, err := kubeClient.CoreV1().Namespaces().Create(context.TODO(), ns, metav1.CreateOptions{}) if err != nil { t.Fatal(err) } log := logtesting.TestLogger{T: t} source := tls.DynamicSource{ DNSNames: []string{"example.com"}, Authority: &authority.DynamicAuthority{ SecretNamespace: namespace, SecretName: "testsecret", RESTConfig: config, Log: log, }, Log: log, } stopCh := make(chan struct{}) doneCh := make(chan struct{}) defer func() { close(stopCh) <-doneCh }() // run the dynamic authority controller in the background go func() { defer close(doneCh) if err := source.Run(stopCh); err != nil && !errors.Is(err, context.Canceled) { t.Fatalf("Unexpected error running source: %v", err) } }() // allow the controller 5s to provision the Secret - this is far longer // than it should ever take. if err := wait.Poll(time.Millisecond*500, time.Second*5, func() (done bool, err error) { cert, err := source.GetCertificate(nil) if err == tls.ErrNotAvailable { t.Logf("GetCertificate has no certificate available, waiting...") return false, nil } if err != nil { return false, err } if cert == nil { t.Errorf("Returned certificate is nil") } t.Logf("Got non-nil certificate from dynamic source") return true, nil }); err != nil { t.Errorf("Failed waiting for source to return a certificate: %v", err) return } }
explode_data.jsonl/32234
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 640 }
[ 2830, 3393, 21752, 3608, 62, 45511, 1155, 353, 8840, 836, 8, 341, 25873, 11, 2936, 1669, 12626, 16708, 3273, 34570, 1155, 340, 16867, 2936, 2822, 16463, 3760, 2959, 11, 8358, 8358, 716, 1669, 12626, 7121, 47174, 1155, 11, 2193, 692, 566...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAggregate_ApplyFormatter(t *testing.T) { expect := ttesting.NewExpect(t) core.TypeRegistry.Register(applyFormatterMockA{}) core.TypeRegistry.Register(applyFormatterMockB{}) config := core.NewPluginConfig("", "format.Aggregate") config.Override("ApplyTo", "") config.Override("Modulators", []interface{}{ "format.applyFormatterMockA", map[string]interface{}{ "format.applyFormatterMockB": map[string]interface{}{ "foo": "bar", }, }, }) plugin, err := core.NewPluginWithConfig(config) expect.NoError(err) formatter, casted := plugin.(*Aggregate) expect.True(casted) msg := core.NewMessage(nil, []byte("foo"), nil, core.InvalidStreamID) err = formatter.ApplyFormatter(msg) expect.NoError(err) expect.Equal("fooAB", string(msg.GetPayload())) expect.Equal("bar", configInjection) }
explode_data.jsonl/979
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 309 }
[ 2830, 3393, 64580, 36117, 398, 14183, 1155, 353, 8840, 836, 8, 341, 24952, 1669, 259, 8840, 7121, 17536, 1155, 692, 71882, 10184, 15603, 19983, 7, 10280, 14183, 11571, 32, 37790, 71882, 10184, 15603, 19983, 7, 10280, 14183, 11571, 33, 625...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPingTimeout(t *testing.T) { replied, timedOut, spurious, err := test(true, false, false, false) switch { case timedOut: case replied: t.Errorf("got reply, expected timeout") case spurious: t.Errorf("got spurious message, expected timeout") case err != nil: t.Errorf("got error %v, expected none", err) } }
explode_data.jsonl/30925
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 113 }
[ 2830, 3393, 69883, 7636, 1155, 353, 8840, 836, 8, 341, 17200, 3440, 11, 25182, 2662, 11, 978, 27526, 11, 1848, 1669, 1273, 3715, 11, 895, 11, 895, 11, 895, 340, 8961, 341, 2722, 25182, 2662, 510, 2722, 19995, 510, 197, 3244, 13080, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestSubtractWithMinimum(t *testing.T) { buildBalanceTable := func() *adt.BalanceTable { rt := mock.NewBuilder(address.Undef).Build(t) store := adt.AsStore(rt) emptyMap, err := adt.MakeEmptyMap(store, builtin.DefaultHamtBitwidth) require.NoError(t, err) bt, err := adt.AsBalanceTable(store, tutil.MustRoot(t, emptyMap)) require.NoError(t, err) return bt } addr := tutil.NewIDAddr(t, 100) zeroAmt := abi.NewTokenAmount(0) t.Run("ok with zero balance", func(t *testing.T) { bt := buildBalanceTable() s, err := bt.SubtractWithMinimum(addr, zeroAmt, zeroAmt) require.NoError(t, err) require.EqualValues(t, zeroAmt, s) }) t.Run("withdraw available when account does not have sufficient balance", func(t *testing.T) { bt := buildBalanceTable() require.NoError(t, bt.Add(addr, abi.NewTokenAmount(5))) s, err := bt.SubtractWithMinimum(addr, abi.NewTokenAmount(2), abi.NewTokenAmount(4)) require.NoError(t, err) require.EqualValues(t, abi.NewTokenAmount(1), s) remaining, err := bt.Get(addr) require.NoError(t, err) require.EqualValues(t, abi.NewTokenAmount(4), remaining) }) t.Run("account has sufficient balance", func(t *testing.T) { bt := buildBalanceTable() require.NoError(t, bt.Add(addr, abi.NewTokenAmount(5))) s, err := bt.SubtractWithMinimum(addr, abi.NewTokenAmount(3), abi.NewTokenAmount(2)) require.NoError(t, err) require.EqualValues(t, abi.NewTokenAmount(3), s) remaining, err := bt.Get(addr) require.NoError(t, err) require.EqualValues(t, abi.NewTokenAmount(2), remaining) }) }
explode_data.jsonl/26116
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 628 }
[ 2830, 3393, 3136, 2144, 2354, 28695, 1155, 353, 8840, 836, 8, 341, 69371, 21190, 2556, 1669, 2915, 368, 353, 25720, 1785, 4978, 2556, 341, 197, 55060, 1669, 7860, 7121, 3297, 15434, 32528, 568, 11066, 1155, 340, 197, 57279, 1669, 993, 8...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEncodeEmpty(t *testing.T) { var buf bytes.Buffer pointer := NewPointer("", 0, nil) _, err := EncodePointer(&buf, pointer) assert.Equal(t, nil, err) bufReader := bufio.NewReader(&buf) val, err := bufReader.ReadString('\n') assert.Equal(t, "", val) assert.Equal(t, "EOF", err.Error()) }
explode_data.jsonl/50433
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 121 }
[ 2830, 3393, 32535, 3522, 1155, 353, 8840, 836, 8, 341, 2405, 6607, 5820, 22622, 198, 197, 16044, 1669, 1532, 9084, 19814, 220, 15, 11, 2092, 340, 197, 6878, 1848, 1669, 56562, 9084, 2099, 5909, 11, 7445, 340, 6948, 12808, 1155, 11, 20...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestComputePodActions drives computePodActions through a table of pod /
// status mutations and checks the resulting podActions plan: which
// containers to start, which to kill, and whether the sandbox must be
// recreated. Each case starts from the shared base pod/status built by
// makeBasePodAndStatus and mutates it via the two optional hooks.
func TestComputePodActions(t *testing.T) { _, _, m, err := createTestRuntimeManager() require.NoError(t, err) // Creating a pair reference pod and status for the test cases to refer // the specific fields. basePod, baseStatus := makeBasePodAndStatus() noAction := podActions{ SandboxID: baseStatus.SandboxStatuses[0].Id, ContainersToStart: []int{}, ContainersToKill: map[kubecontainer.ContainerID]containerToKillInfo{}, } for desc, test := range map[string]struct { mutatePodFn func(*v1.Pod) mutateStatusFn func(*kubecontainer.PodStatus) actions podActions }{ "everying is good; do nothing": { actions: noAction, }, "start pod sandbox and all containers for a new pod": { mutateStatusFn: func(status *kubecontainer.PodStatus) { // No container or sandbox exists. status.SandboxStatuses = []*runtimeapi.PodSandboxStatus{} status.ContainerStatuses = []*kubecontainer.ContainerStatus{} }, actions: podActions{ KillPod: true, CreateSandbox: true, Attempt: uint32(0), ContainersToStart: []int{0, 1, 2}, ContainersToKill: getKillMap(basePod, baseStatus, []int{}), }, }, "restart exited containers if RestartPolicy == Always": { mutatePodFn: func(pod *v1.Pod) { pod.Spec.RestartPolicy = v1.RestartPolicyAlways }, mutateStatusFn: func(status *kubecontainer.PodStatus) { // The first container completed, restart it, status.ContainerStatuses[0].State = kubecontainer.ContainerStateExited status.ContainerStatuses[0].ExitCode = 0 // The second container exited with failure, restart it, status.ContainerStatuses[1].State = kubecontainer.ContainerStateExited status.ContainerStatuses[1].ExitCode = 111 }, actions: podActions{ SandboxID: baseStatus.SandboxStatuses[0].Id, ContainersToStart: []int{0, 1}, ContainersToKill: getKillMap(basePod, baseStatus, []int{}), }, }, "restart failed containers if RestartPolicy == OnFailure": { mutatePodFn: func(pod *v1.Pod) { pod.Spec.RestartPolicy = v1.RestartPolicyOnFailure }, mutateStatusFn: func(status *kubecontainer.PodStatus) { // The first container completed, 
don't restart it, status.ContainerStatuses[0].State = kubecontainer.ContainerStateExited status.ContainerStatuses[0].ExitCode = 0 // The second container exited with failure, restart it, status.ContainerStatuses[1].State = kubecontainer.ContainerStateExited status.ContainerStatuses[1].ExitCode = 111 }, actions: podActions{ SandboxID: baseStatus.SandboxStatuses[0].Id, ContainersToStart: []int{1}, ContainersToKill: getKillMap(basePod, baseStatus, []int{}), }, }, "don't restart containers if RestartPolicy == Never": { mutatePodFn: func(pod *v1.Pod) { pod.Spec.RestartPolicy = v1.RestartPolicyNever }, mutateStatusFn: func(status *kubecontainer.PodStatus) { // Don't restart any containers. status.ContainerStatuses[0].State = kubecontainer.ContainerStateExited status.ContainerStatuses[0].ExitCode = 0 status.ContainerStatuses[1].State = kubecontainer.ContainerStateExited status.ContainerStatuses[1].ExitCode = 111 }, actions: noAction, }, "Kill pod and recreate everything if the pod sandbox is dead, and RestartPolicy == Always": { mutatePodFn: func(pod *v1.Pod) { pod.Spec.RestartPolicy = v1.RestartPolicyAlways }, mutateStatusFn: func(status *kubecontainer.PodStatus) { status.SandboxStatuses[0].State = runtimeapi.PodSandboxState_SANDBOX_NOTREADY }, actions: podActions{ KillPod: true, CreateSandbox: true, SandboxID: baseStatus.SandboxStatuses[0].Id, Attempt: uint32(1), ContainersToStart: []int{0, 1, 2}, ContainersToKill: getKillMap(basePod, baseStatus, []int{}), }, }, "Kill pod and recreate all containers (except for the succeeded one) if the pod sandbox is dead, and RestartPolicy == OnFailure": { mutatePodFn: func(pod *v1.Pod) { pod.Spec.RestartPolicy = v1.RestartPolicyOnFailure }, mutateStatusFn: func(status *kubecontainer.PodStatus) { status.SandboxStatuses[0].State = runtimeapi.PodSandboxState_SANDBOX_NOTREADY status.ContainerStatuses[1].State = kubecontainer.ContainerStateExited status.ContainerStatuses[1].ExitCode = 0 }, actions: podActions{ KillPod: true, 
CreateSandbox: true, SandboxID: baseStatus.SandboxStatuses[0].Id, Attempt: uint32(1), ContainersToStart: []int{0, 2}, ContainersToKill: getKillMap(basePod, baseStatus, []int{}), }, }, "Kill pod and recreate all containers if the PodSandbox does not have an IP": { mutateStatusFn: func(status *kubecontainer.PodStatus) { status.SandboxStatuses[0].Network.Ip = "" }, actions: podActions{ KillPod: true, CreateSandbox: true, SandboxID: baseStatus.SandboxStatuses[0].Id, Attempt: uint32(1), ContainersToStart: []int{0, 1, 2}, ContainersToKill: getKillMap(basePod, baseStatus, []int{}), }, }, "Kill and recreate the container if the container's spec changed": { mutatePodFn: func(pod *v1.Pod) { pod.Spec.RestartPolicy = v1.RestartPolicyAlways }, mutateStatusFn: func(status *kubecontainer.PodStatus) { status.ContainerStatuses[1].Hash = uint64(432423432) }, actions: podActions{ SandboxID: baseStatus.SandboxStatuses[0].Id, ContainersToKill: getKillMap(basePod, baseStatus, []int{1}), ContainersToStart: []int{1}, }, // TODO: Add a test case for containers which failed the liveness // check. Will need to fake the liveness check result. }, } { pod, status := makeBasePodAndStatus() if test.mutatePodFn != nil { test.mutatePodFn(pod) } if test.mutateStatusFn != nil { test.mutateStatusFn(status) } actions := m.computePodActions(pod, status) verifyActions(t, &test.actions, &actions, desc) } }
explode_data.jsonl/14243
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2413 }
[ 2830, 3393, 46254, 23527, 12948, 1155, 353, 8840, 836, 8, 341, 197, 6878, 8358, 296, 11, 1848, 1669, 1855, 2271, 15123, 2043, 741, 17957, 35699, 1155, 11, 1848, 692, 197, 322, 4230, 287, 264, 6716, 5785, 7509, 323, 2639, 369, 279, 127...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAppendEmptyLabelsIgnored(t *testing.T) { db, delete := openTestDB(t, nil) defer func() { testutil.Ok(t, db.Close()) delete() }() app1 := db.Appender() ref1, err := app1.Add(labels.FromStrings("a", "b"), 123, 0) testutil.Ok(t, err) // Construct labels manually so there is an empty label. ref2, err := app1.Add(labels.Labels{labels.Label{Name: "a", Value: "b"}, labels.Label{Name: "c", Value: ""}}, 124, 0) testutil.Ok(t, err) // Should be the same series. testutil.Equals(t, ref1, ref2) err = app1.Commit() testutil.Ok(t, err) }
explode_data.jsonl/64363
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 233 }
[ 2830, 3393, 23877, 3522, 23674, 43337, 3018, 1155, 353, 8840, 836, 8, 341, 20939, 11, 3698, 1669, 1787, 2271, 3506, 1155, 11, 2092, 340, 16867, 2915, 368, 341, 197, 18185, 1314, 54282, 1155, 11, 2927, 10421, 2398, 197, 15618, 741, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestAddBuildOrImage3 covers the "use a prebuilt image from a non-default
// registry" path of AddBuildOrImage: the user declines building from source,
// picks the ghcr.io registry and an image, and the service config is filled
// with the registry-prefixed image name. All interactive and I/O helpers
// (questions, logging, file checks, manifest lookup) are replaced with mocks
// that also assert the prompts and arguments they receive.
func TestAddBuildOrImage3(t *testing.T) { // Test data project := &model.CGProject{} service := &spec.ServiceConfig{} expectedService := &spec.ServiceConfig{ Name: "database-spice", Image: "ghcr.io/chillibits/spice:0.3.0", } testManifest := diu.DockerManifest{ SchemaV2Manifest: diu.SchemaV2Manifest{ Layers: []diu.Layer{ {}, {}, {}, {}, {}, {}, {}, }, }, } // Mock functions textQuestionCallCounter := 0 textQuestionWithDefault = func(question, defaultValue string) (result string) { textQuestionCallCounter++ if textQuestionCallCounter == 1 { assert.Equal(t, "From which registry do you want to pick?", question) assert.Equal(t, "docker.io", defaultValue) result = "ghcr.io" } else { assert.Equal(t, "Which Image do you want to use? (e.g. chillibits/ccom:0.8.0)", question) assert.Equal(t, "hello-world", defaultValue) result = "chillibits/spice:0.3.0" } return } yesNoQuestion = func(question string, defaultValue bool) (result bool) { assert.Equal(t, "Build from source?", question) assert.False(t, defaultValue) return false } logError = func(message string, exit bool) { assert.Equal(t, "The Dockerfile could not be found", message) assert.True(t, exit) } fileExists = func(path string) bool { return false } getImageManifest = func(image string) (diu.DockerManifest, error) { assert.Equal(t, "ghcr.io/chillibits/spice:0.3.0", image) return testManifest, nil } pel = func() {} p = func(text string) { assert.Equal(t, "Searching image ... ", text) } success = func(text string) { assert.Equal(t, " found - 7 layer(s)", text) } // Execute test AddBuildOrImage(service, project, model.TemplateTypeDatabase) // Assert assert.Equal(t, expectedService, service) }
explode_data.jsonl/22029
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 684 }
[ 2830, 3393, 2212, 11066, 2195, 1906, 18, 1155, 353, 8840, 836, 8, 341, 197, 322, 3393, 821, 198, 72470, 1669, 609, 2528, 91658, 7849, 16094, 52934, 1669, 609, 9535, 13860, 2648, 16094, 42400, 1860, 1669, 609, 9535, 13860, 2648, 515, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestUserFriendDeleteNOTEXIST(t *testing.T) { resp, _ := sendDelete("http://localhost:8080/ABCDEFGH&q=undofriendship", UserFriendRequestEXAMPLE1, auth.Header.Get("Authorization")) response := responseToString(resp) compareResults(t, response, HyperText.CustomResponses["not-found-entity"]) }
explode_data.jsonl/59361
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 97 }
[ 2830, 3393, 1474, 41637, 6435, 14065, 92827, 1155, 353, 8840, 836, 8, 341, 34653, 11, 716, 1669, 3624, 6435, 445, 1254, 1110, 8301, 25, 23, 15, 23, 15, 14, 67004, 39, 62735, 28, 1241, 1055, 5039, 5270, 497, 2657, 41637, 1900, 95875, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestActionsAndAttributes parses the actions-and-attributes workflow fixture
// and asserts every attribute of its two actions: identifiers, needs/deps,
// uses paths, run/args commands, env vars and secrets.
func TestActionsAndAttributes(t *testing.T) { workflow, _ := fixture(t, "valid/actions-and-attributes.workflow") actionA := workflow.Actions[0] assert.Equal(t, "a", actionA.Identifier) assert.Equal(t, 0, len(actionA.Needs)) assert.Equal(t, &model.UsesPath{Path: "x"}, actionA.Uses) assert.Equal(t, &model.StringCommand{Value: "cmd"}, actionA.Runs) assert.Equal(t, map[string]string{"PATH": "less traveled by", "HOME": "where the heart is"}, actionA.Env) actionB := workflow.Actions[1] assert.Equal(t, "b", actionB.Identifier) assert.Equal(t, &model.UsesPath{Path: "y"}, actionB.Uses) assert.Equal(t, []string{"a"}, actionB.Needs) assert.Equal(t, &model.ListCommand{Values: []string{"foo", "bar"}}, actionB.Args) assert.Equal(t, []string{"THE", "CURRENCY", "OF", "INTIMACY"}, actionB.Secrets) }
explode_data.jsonl/55660
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 320 }
[ 2830, 3393, 12948, 3036, 10516, 1155, 353, 8840, 836, 8, 341, 197, 56249, 11, 716, 1669, 12507, 1155, 11, 330, 1891, 39121, 9777, 12, 12340, 72774, 5130, 38933, 32, 1669, 28288, 72044, 58, 15, 921, 6948, 12808, 1155, 11, 330, 64, 497,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestHashKeyValueRepoContainsValue(t *testing.T) { repo := NewHashKeyValueRepo(&mockRepo{}) item := "a" _, err := repo.Save(item) checkError(err, t) if !repo.ContainsValue(item) { t.Errorf("Could not find %v", item) } }
explode_data.jsonl/79720
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 99 }
[ 2830, 3393, 6370, 72082, 25243, 23805, 1130, 1155, 353, 8840, 836, 8, 341, 17200, 5368, 1669, 1532, 6370, 72082, 25243, 2099, 16712, 25243, 6257, 692, 22339, 1669, 330, 64, 698, 197, 6878, 1848, 1669, 15867, 13599, 5393, 340, 25157, 1454,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestDb_L0_CompactionBug_Issue44_a is a regression test for issue 44: a
// specific interleaving of puts, deletes and DB reopens that previously
// tripped an L0 compaction bug. The exact order of operations is the test,
// so it must not be reordered. After the sequence, key "a" must read back
// as "v" both before and after waiting for compaction.
func TestDb_L0_CompactionBug_Issue44_a(t *testing.T) { h := newDbHarness(t) defer h.close() h.reopenDB() h.put("b", "v") h.reopenDB() h.delete("b") h.delete("a") h.reopenDB() h.delete("a") h.reopenDB() h.put("a", "v") h.reopenDB() h.reopenDB() h.getKeyVal("(a->v)") h.waitCompaction() h.getKeyVal("(a->v)") }
explode_data.jsonl/6030
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 174 }
[ 2830, 3393, 7994, 2351, 15, 920, 14435, 1311, 46773, 7959, 83890, 19, 19, 4306, 1155, 353, 8840, 836, 8, 341, 9598, 1669, 501, 7994, 74248, 1155, 340, 16867, 305, 4653, 2822, 9598, 1327, 2508, 3506, 741, 9598, 3597, 445, 65, 497, 330,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestReadChallengeTx_invalidOperationNoSourceAccount builds a SEP-10
// challenge whose ManageData operation has no explicit source account and
// verifies ReadChallengeTx rejects it with "operation should have a source
// account".
func TestReadChallengeTx_invalidOperationNoSourceAccount(t *testing.T) { serverKP := newKeypair0() txSource := NewSimpleAccount(serverKP.Address(), -1) op := ManageData{ Name: "testanchor.stellar.org auth", Value: []byte(base64.StdEncoding.EncodeToString(make([]byte, 48))), } tx, err := NewTransaction( TransactionParams{ SourceAccount: &txSource, IncrementSequenceNum: true, Operations: []Operation{&op}, BaseFee: MinBaseFee, Timebounds: NewTimeout(300), }, ) assert.NoError(t, err) tx, err = tx.Sign(network.TestNetworkPassphrase, serverKP) assert.NoError(t, err) tx64, err := tx.Base64() require.NoError(t, err) _, _, _, err = ReadChallengeTx(tx64, serverKP.Address(), network.TestNetworkPassphrase, "testwebauth.stellar.org", []string{"testanchor.stellar.org"}) assert.EqualError(t, err, "operation should have a source account") }
explode_data.jsonl/20709
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 372 }
[ 2830, 3393, 4418, 62078, 31584, 31433, 8432, 2753, 3608, 7365, 1155, 353, 8840, 836, 8, 341, 41057, 65036, 1669, 501, 6608, 1082, 1310, 15, 741, 46237, 3608, 1669, 1532, 16374, 7365, 21421, 65036, 26979, 1507, 481, 16, 340, 39703, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestClient_PullIsApproved table-tests approval detection against canned
// Bitbucket Cloud pull-request JSON fixtures served from a local httptest
// server: no approvers and author-only approval count as unapproved; any
// non-author approver counts as approved.
func TestClient_PullIsApproved(t *testing.T) { cases := []struct { description string testdata string exp bool }{ { "no approvers", "pull-unapproved.json", false, }, { "approver is the author", "pull-approved-by-author.json", false, }, { "single approver", "pull-approved.json", true, }, { "two approvers one author", "pull-approved-multiple.json", true, }, } for _, c := range cases { t.Run(c.description, func(t *testing.T) { json, err := ioutil.ReadFile(filepath.Join("testdata", c.testdata)) Ok(t, err) testServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { switch r.RequestURI { // The first request should hit this URL. case "/2.0/repositories/owner/repo/pullrequests/1": w.Write(json) // nolint: errcheck return default: t.Errorf("got unexpected request at %q", r.RequestURI) http.Error(w, "not found", http.StatusNotFound) return } })) defer testServer.Close() client := bitbucketcloud.NewClient(http.DefaultClient, "user", "pass", "runatlantis.io") client.BaseURL = testServer.URL repo, err := models.NewRepo(models.BitbucketServer, "owner/repo", "https://bitbucket.org/owner/repo.git", "user", "token") Ok(t, err) approved, err := client.PullIsApproved(repo, models.PullRequest{ Num: 1, HeadBranch: "branch", Author: "author", BaseRepo: repo, }) Ok(t, err) Equals(t, c.exp, approved) }) } }
explode_data.jsonl/53234
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 691 }
[ 2830, 3393, 2959, 1088, 617, 3872, 59651, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 42407, 914, 198, 197, 18185, 691, 262, 914, 198, 197, 48558, 260, 1807, 198, 197, 59403, 197, 197, 515, 298, 197, 1, 215...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func Test_encode_byte_array(t *testing.T) { should := require.New(t) bytes, err := json.Marshal([]byte{1, 2, 3}) should.Nil(err) should.Equal(`"AQID"`, string(bytes)) bytes, err = jsoner.DefaultAPI().Marshal([]byte{1, 2, 3}) should.Nil(err) should.Equal(`"AQID"`, string(bytes)) }
explode_data.jsonl/57882
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 127 }
[ 2830, 3393, 11224, 19737, 3858, 1155, 353, 8840, 836, 8, 341, 197, 5445, 1669, 1373, 7121, 1155, 340, 70326, 11, 1848, 1669, 2951, 37271, 10556, 3782, 90, 16, 11, 220, 17, 11, 220, 18, 3518, 197, 5445, 59678, 3964, 340, 197, 5445, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestManifestGeneratePilot runs the shared manifest-generation test group
// over several pilot configurations, each case diffing only the selected
// (or ignoring the listed) rendered Kubernetes resources against golden data.
func TestManifestGeneratePilot(t *testing.T) { runTestGroup(t, testGroup{ { desc: "pilot_default", diffIgnore: "CustomResourceDefinition:*:*,ConfigMap:*:istio", }, { desc: "pilot_k8s_settings", diffSelect: "Deployment:*:istiod,HorizontalPodAutoscaler:*:istiod", }, { desc: "pilot_override_values", diffSelect: "Deployment:*:istiod,HorizontalPodAutoscaler:*:istiod", }, { desc: "pilot_override_kubernetes", diffSelect: "Deployment:*:istiod, Service:*:istiod,MutatingWebhookConfiguration:*:istio-sidecar-injector,ClusterRoleBinding::istio-reader-istio-system", }, // TODO https://github.com/istio/istio/issues/22347 this is broken for overriding things to default value // This can be seen from REGISTRY_ONLY not applying { desc: "pilot_merge_meshconfig", diffSelect: "ConfigMap:*:istio$", }, }) }
explode_data.jsonl/47910
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 374 }
[ 2830, 3393, 38495, 31115, 47, 23958, 1155, 353, 8840, 836, 8, 341, 56742, 2271, 2808, 1155, 11, 1273, 2808, 515, 197, 197, 515, 298, 41653, 25, 981, 330, 79, 23958, 9993, 756, 298, 80564, 12497, 25, 330, 10268, 4783, 10398, 53386, 25,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestUploadUnexpectedEOF feeds the s3manager uploader a reader that claims
// one byte more than the minimum part size but terminates early, and checks
// that the multipart upload starts (CreateMultipartUpload first) and is
// aborted (AbortMultipartUpload last) when the body ends unexpectedly.
func TestUploadUnexpectedEOF(t *testing.T) { s, ops, _ := loggingSvc(emptyList) mgr := s3manager.NewUploaderWithClient(s, func(u *s3manager.Uploader) { u.Concurrency = 1 u.PartSize = s3manager.MinUploadPartSize }) _, err := mgr.Upload(&s3manager.UploadInput{ Bucket: aws.String("Bucket"), Key: aws.String("Key"), Body: &testIncompleteReader{ Size: int64(s3manager.MinUploadPartSize + 1), }, }) if err == nil { t.Error("Expected error, but received none") } // Ensure upload started. if e, a := "CreateMultipartUpload", (*ops)[0]; e != a { t.Errorf("Expected %q, but received %q", e, a) } // Part may or may not be sent because of timing of sending parts and // reading next part in upload manager. Just check for the last abort. if e, a := "AbortMultipartUpload", (*ops)[len(*ops)-1]; e != a { t.Errorf("Expected %q, but received %q", e, a) } }
explode_data.jsonl/55651
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 341 }
[ 2830, 3393, 13844, 29430, 23483, 1155, 353, 8840, 836, 8, 341, 1903, 11, 27132, 11, 716, 1669, 8392, 92766, 24216, 852, 340, 2109, 901, 1669, 274, 18, 13297, 7121, 67574, 2354, 2959, 1141, 11, 2915, 8154, 353, 82, 18, 13297, 60828, 83...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestRecord_Playback wraps a single-op Playback connection in a Record and
// checks: the duplex mode is forwarded, one Tx consumes the scripted I/O op
// (write {10}, read back {12}), and a second Tx fails because the Ops list
// is exhausted (DontPanic makes it error instead of panicking).
func TestRecord_Playback(t *testing.T) { r := Record{ Conn: &Playback{ Ops: []IO{ { W: []byte{10}, R: []byte{12}, }, }, D: conn.Full, DontPanic: true, }, } if d := r.Duplex(); d != conn.Full { t.Fatal(d) } v := [1]byte{} if err := r.Tx([]byte{10}, v[:]); err != nil { t.Fatal(err) } if v[0] != 12 { t.Fatalf("expected 12, got %v", v) } if r.Tx([]byte{10}, v[:]) == nil { t.Fatal("Playback.Ops is empty") } }
explode_data.jsonl/44858
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 261 }
[ 2830, 3393, 6471, 91854, 1419, 1155, 353, 8840, 836, 8, 341, 7000, 1669, 13583, 515, 197, 197, 9701, 25, 609, 87125, 515, 298, 197, 38904, 25, 3056, 3810, 515, 571, 197, 515, 464, 17300, 25, 3056, 3782, 90, 16, 15, 1583, 464, 11143,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
// TestSlice checks nulls3.Slice over all (start, end) pairs in pos: bit i of
// the sliced null bitmap must be set iff start+i is within [start, end) and
// was null in the source. It then re-verifies nulls3 itself (every third
// position null) to prove Slice did not mutate its receiver.
func TestSlice(t *testing.T) { for _, start := range pos { for _, end := range pos { n := nulls3.Slice(start, end) for i := uint64(0); i < uint64(8*len(n.nulls)); i++ { expected := start+i < end && nulls3.NullAt64(start+i) require.Equal(t, expected, n.NullAt64(i), "expected nulls3.Slice(%d, %d).NullAt(%d) to be %b", start, end, i, expected) } } } // Ensure we haven't modified the receiver. for i := uint16(0); i < BatchSize; i++ { expected := i%3 == 0 require.Equal(t, expected, nulls3.NullAt(i)) } }
explode_data.jsonl/37160
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 244 }
[ 2830, 3393, 33236, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 1191, 1669, 2088, 1133, 341, 197, 2023, 8358, 835, 1669, 2088, 1133, 341, 298, 9038, 1669, 845, 82, 18, 95495, 10639, 11, 835, 340, 298, 2023, 600, 1669, 2622, 21, 19, 7, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
// TestBIP0009 runs the shared BIP 9 version-bits state-machine checks for
// both the test-dummy and segwit deployments.
func TestBIP0009(t *testing.T) { t.Parallel() testBIP0009(t, "dummy", chaincfg.DeploymentTestDummy) testBIP0009(t, "segwit", chaincfg.DeploymentSegwit) }
explode_data.jsonl/55154
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 66 }
[ 2830, 3393, 33, 3298, 15, 15, 15, 24, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 18185, 33, 3298, 15, 15, 15, 24, 1155, 11, 330, 31390, 497, 8781, 14072, 34848, 39130, 2271, 43344, 340, 18185, 33, 3298, 15, 15, 15, 24,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestUGetMarkPrice(t *testing.T) { t.Parallel() _, err := b.UGetMarkPrice(context.Background(), currency.NewPair(currency.BTC, currency.USDT)) if err != nil { t.Error(err) } _, err = b.UGetMarkPrice(context.Background(), currency.EMPTYPAIR) if err != nil { t.Error(err) } }
explode_data.jsonl/76557
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 117 }
[ 2830, 3393, 52, 1949, 8949, 6972, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 197, 6878, 1848, 1669, 293, 5255, 1949, 8949, 6972, 5378, 19047, 1507, 11413, 7121, 12443, 90475, 1785, 7749, 11, 11413, 67672, 10599, 1171, 743, 18...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestSetDefaultReplicaSet round-trips ReplicaSets through defaulting and
// checks whether the pod template labels were propagated to the object's
// labels (expectLabels) and to the selector's match labels (expectSelector):
// defaulting fills each only when it was not explicitly set.
func TestSetDefaultReplicaSet(t *testing.T) { tests := []struct { rs *extensionsv1beta1.ReplicaSet expectLabels bool expectSelector bool }{ { rs: &extensionsv1beta1.ReplicaSet{ Spec: extensionsv1beta1.ReplicaSetSpec{ Template: v1.PodTemplateSpec{ ObjectMeta: metav1.ObjectMeta{ Labels: map[string]string{ "foo": "bar", }, }, }, }, }, expectLabels: true, expectSelector: true, }, { rs: &extensionsv1beta1.ReplicaSet{ ObjectMeta: metav1.ObjectMeta{ Labels: map[string]string{ "bar": "foo", }, }, Spec: extensionsv1beta1.ReplicaSetSpec{ Template: v1.PodTemplateSpec{ ObjectMeta: metav1.ObjectMeta{ Labels: map[string]string{ "foo": "bar", }, }, }, }, }, expectLabels: false, expectSelector: true, }, { rs: &extensionsv1beta1.ReplicaSet{ ObjectMeta: metav1.ObjectMeta{ Labels: map[string]string{ "bar": "foo", }, }, Spec: extensionsv1beta1.ReplicaSetSpec{ Selector: &metav1.LabelSelector{ MatchLabels: map[string]string{ "some": "other", }, }, Template: v1.PodTemplateSpec{ ObjectMeta: metav1.ObjectMeta{ Labels: map[string]string{ "foo": "bar", }, }, }, }, }, expectLabels: false, expectSelector: false, }, { rs: &extensionsv1beta1.ReplicaSet{ Spec: extensionsv1beta1.ReplicaSetSpec{ Selector: &metav1.LabelSelector{ MatchLabels: map[string]string{ "some": "other", }, }, Template: v1.PodTemplateSpec{ ObjectMeta: metav1.ObjectMeta{ Labels: map[string]string{ "foo": "bar", }, }, }, }, }, expectLabels: true, expectSelector: false, }, } for _, test := range tests { rs := test.rs obj2 := roundTrip(t, runtime.Object(rs)) rs2, ok := obj2.(*extensionsv1beta1.ReplicaSet) if !ok { t.Errorf("unexpected object: %v", rs2) t.FailNow() } if test.expectSelector != reflect.DeepEqual(rs2.Spec.Selector.MatchLabels, rs2.Spec.Template.Labels) { if test.expectSelector { t.Errorf("expected: %v, got: %v", rs2.Spec.Template.Labels, rs2.Spec.Selector) } else { t.Errorf("unexpected equality: %v", rs.Spec.Selector) } } if test.expectLabels != reflect.DeepEqual(rs2.Labels, rs2.Spec.Template.Labels) { if 
test.expectLabels { t.Errorf("expected: %v, got: %v", rs2.Spec.Template.Labels, rs2.Labels) } else { t.Errorf("unexpected equality: %v", rs.Labels) } } } }
explode_data.jsonl/53700
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1334 }
[ 2830, 3393, 1649, 3675, 18327, 15317, 1649, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 41231, 1797, 353, 27609, 85, 16, 19127, 16, 2817, 79, 15317, 1649, 198, 197, 24952, 23674, 256, 1807, 198, 197, 24952, 5877, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
// TestConvertWithMisMatchMapTypes feeds convertTypes a Foo->Bar conversion
// where field "two" is a plain string in Foo but a map in Bar, and asserts
// conversion fails with the expected mismatch error and produces no output.
func TestConvertWithMisMatchMapTypes(t *testing.T) { lines, err := convertTypes( "Foo", "Bar", ` struct Inner { 1: optional string field } struct Foo { 1: optional map<string, Inner> one 2: required string two } struct Bar { 1: optional map<string, Inner> one 2: required map<string, Inner> two }`, nil, nil, ) assert.Error(t, err) assert.Equal(t, "", lines) assert.Equal(t, "Could not convert field (two): type is not map", err.Error(), ) }
explode_data.jsonl/62056
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 207 }
[ 2830, 3393, 12012, 2354, 83159, 8331, 2227, 4173, 1155, 353, 8840, 836, 8, 341, 78390, 11, 1848, 1669, 5508, 4173, 1006, 197, 197, 1, 40923, 497, 330, 3428, 756, 197, 197, 3989, 197, 6472, 36356, 341, 298, 197, 16, 25, 10101, 914, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_unmarshal_int16(t *testing.T) { should := require.New(t) for _, c := range test.UnmarshalCombinations { buf, proto := c.CreateProtocol() proto.WriteI16(-1) var val int16 should.NoError(c.Unmarshal(buf.Bytes(), &val)) should.Equal(int16(-1), val) } }
explode_data.jsonl/36754
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 119 }
[ 2830, 3393, 4907, 27121, 4042, 16, 21, 1155, 353, 8840, 836, 8, 341, 197, 5445, 1669, 1373, 7121, 1155, 340, 2023, 8358, 272, 1669, 2088, 1273, 38097, 1092, 73629, 341, 197, 26398, 11, 18433, 1669, 272, 7251, 20689, 741, 197, 197, 151...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestStopCancelsProducers verifies that stopping the scheduler also stops
// its producer goroutines: after Stop, closing checksPipe must not panic
// (nobody is left sending on it). Timing-sensitive — the sleeps give the
// runtime a chance to schedule the producers, so do not shorten them.
func TestStopCancelsProducers(t *testing.T) { ch := make(chan check.Check) stop := make(chan bool) s := NewScheduler(ch) // consume the enqueued checks go consume(ch, stop) minAllowedInterval = time.Millisecond // for the purpose of this test, so that the scheduler actually schedules the checks defer resetMinAllowedInterval() s.Enter(&TestCheck{intl: time.Millisecond}) s.Run() time.Sleep(2 * time.Millisecond) // wait for the scheduler to actually schedule the check s.Stop() // stop check consumer routine stop <- true // once the scheduler is stopped, it should be safe to close this channel. Otherwise, this should panic close(s.checksPipe) // sleep to make the runtime schedule the hanging producer goroutines, if there are any left time.Sleep(time.Millisecond) }
explode_data.jsonl/23207
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 243 }
[ 2830, 3393, 10674, 34, 1129, 2010, 1336, 33375, 1155, 353, 8840, 836, 8, 341, 23049, 1669, 1281, 35190, 1779, 10600, 340, 62644, 1669, 1281, 35190, 1807, 340, 1903, 1669, 1532, 38878, 7520, 692, 197, 322, 24057, 279, 662, 66547, 12341, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_Monitor_405(t *testing.T) { t.Parallel() app := fiber.New() app.Use("/", New()) resp, err := app.Test(httptest.NewRequest(fiber.MethodPost, "/", nil)) utils.AssertEqual(t, nil, err) utils.AssertEqual(t, 405, resp.StatusCode) }
explode_data.jsonl/23610
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 101 }
[ 2830, 3393, 1245, 30314, 62, 19, 15, 20, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 28236, 1669, 23788, 7121, 2822, 28236, 9046, 35460, 1532, 12367, 34653, 11, 1848, 1669, 906, 8787, 73392, 83, 70334, 75274, 955, 8629, 20798...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLatestVersion(t *testing.T) { if testing.Short() { t.Skip("skipping test in short mode.") } version, err := LatestVersion() if err != nil { t.Fatalf("%v", err) } if version == "" { t.Fatalf("Version not found") } t.Logf("Latest version is %q, current version: %q", version, InitializedVersion()) }
explode_data.jsonl/35278
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 118 }
[ 2830, 3393, 31992, 5637, 1155, 353, 8840, 836, 8, 341, 743, 7497, 55958, 368, 341, 197, 3244, 57776, 445, 4886, 5654, 1273, 304, 2805, 3856, 13053, 197, 630, 74954, 11, 1848, 1669, 28157, 5637, 741, 743, 1848, 961, 2092, 341, 197, 324...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestCheckControlPlanePodExistence verifies that the "cat1" control-plane
// existence checks are answered from the (fake) Kubernetes API: each case
// seeds the API with resources and expects the matching check description
// to be reported as passing.
func TestCheckControlPlanePodExistence(t *testing.T) {
	var testCases = []struct {
		checkDescription string
		resources        []string
		expected         []string
	}{
		{
			checkDescription: "controller pod is running",
			resources: []string{` apiVersion: v1 kind: Pod metadata: name: linkerd-controller-6f78cbd47-bc557 namespace: test-ns status: phase: Running podIP: 1.2.3.4 `,
			},
			expected: []string{
				"cat1 controller pod is running",
			},
		},
		{
			checkDescription: "'linkerd-config' config map exists",
			resources: []string{` apiVersion: v1 kind: ConfigMap metadata: name: linkerd-config namespace: test-ns `,
			},
			expected: []string{
				"cat1 'linkerd-config' config map exists",
			},
		},
		{
			// Same check as above, but the config map carries extension data.
			checkDescription: "'linkerd-config' config map exists",
			resources: []string{` apiVersion: v1 kind: ConfigMap metadata: name: linkerd-config namespace: test-ns data: values: |- tracing: collector: name: linkerd-collector enabled: false jaeger: name: linkerd-jaeger enabled: true `,
			},
			expected: []string{
				"cat1 'linkerd-config' config map exists",
			},
		},
	}
	for id, testCase := range testCases {
		testCase := testCase // shadow for the closure below (pre-Go 1.22 idiom)
		t.Run(fmt.Sprintf("%d", id), func(t *testing.T) {
			hc := NewHealthChecker(
				[]CategoryID{},
				&Options{
					ControlPlaneNamespace: "test-ns",
				},
			)
			var err error
			hc.kubeAPI, err = k8s.NewFakeAPI(testCase.resources...)
			if err != nil {
				t.Fatalf("Unexpected error: %s", err)
			}
			// validate that this check relies on the k8s api, not on hc.controlPlanePods
			hc.addCheckAsCategory("cat1", LinkerdControlPlaneExistenceChecks, testCase.checkDescription)
			obs := newObserver()
			hc.RunChecks(obs.resultFn)
			if !reflect.DeepEqual(obs.results, testCase.expected) {
				t.Fatalf("Expected results %v, but got %v", testCase.expected, obs.results)
			}
		})
	}
}
explode_data.jsonl/19140
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 805 }
[ 2830, 3393, 3973, 3273, 34570, 23527, 25613, 763, 1155, 353, 8840, 836, 8, 341, 2405, 1273, 37302, 284, 3056, 1235, 341, 197, 25157, 5009, 914, 198, 197, 10202, 2360, 286, 3056, 917, 198, 197, 42400, 260, 3056, 917, 198, 197, 59403, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestSyncWithPlotter reconciles a FybrikApplication whose generation (2) is
// ahead of its observed generation (1) while a ready Plotter already exists.
// After Reconcile the application must report error messages and must no
// longer be Ready.
func TestSyncWithPlotter(t *testing.T) {
	t.Parallel()
	g := gomega.NewGomegaWithT(t)
	// Set the logger to development mode for verbose logs.
	logf.SetLogger(zap.New(zap.UseDevMode(true)))
	namespaced := types.NamespacedName{
		Name:      "notebook",
		Namespace: "default",
	}
	application := &app.FybrikApplication{}
	g.Expect(readObjectFromFile("../../testdata/unittests/fybrikcopyapp-csv.yaml", application)).NotTo(gomega.HaveOccurred())
	// imitate a ready phase for the earlier generation
	application.SetGeneration(2)
	application.Finalizers = []string{"TestReconciler.finalizer"}
	controllerNamespace := utils.GetControllerNamespace()
	fmt.Printf("FybrikApplication unit test: controller namespace " + controllerNamespace)
	application.Status.Generated = &app.ResourceReference{Name: "plotter", Namespace: controllerNamespace, Kind: "Plotter", AppVersion: 1}
	application.Status.Ready = true
	application.Status.ObservedGeneration = 1
	// Objects to track in the fake client.
	objs := []runtime.Object{
		application,
	}
	// Register operator types with the runtime scheme.
	s := utils.NewScheme(g)
	// Create a fake client to mock API calls.
	cl := fake.NewFakeClientWithScheme(s, objs...)
	// Seed a Plotter that is already observed as ready in the controller namespace.
	plotter := &app.Plotter{}
	g.Expect(readObjectFromFile("../../testdata/plotter.yaml", plotter)).NotTo(gomega.HaveOccurred())
	plotter.Status.ObservedState.Ready = true
	plotter.Namespace = controllerNamespace
	g.Expect(cl.Create(context.Background(), plotter)).NotTo(gomega.HaveOccurred())
	// Create a FybrikApplicationReconciler object with the scheme and fake client.
	r := createTestFybrikApplicationController(cl, s)
	req := reconcile.Request{
		NamespacedName: namespaced,
	}
	_, err := r.Reconcile(context.Background(), req)
	g.Expect(err).To(gomega.BeNil())
	newApp := &app.FybrikApplication{}
	err = cl.Get(context.Background(), req.NamespacedName, newApp)
	g.Expect(err).To(gomega.BeNil(), "Cannot fetch fybrikapplication")
	// The reconciled application must carry error messages and lose Ready.
	g.Expect(getErrorMessages(newApp)).NotTo(gomega.BeEmpty())
	g.Expect(newApp.Status.Ready).NotTo(gomega.BeTrue())
}
explode_data.jsonl/45026
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 709 }
[ 2830, 3393, 12154, 2354, 25605, 465, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 3174, 1669, 342, 32696, 7121, 38, 32696, 2354, 51, 1155, 340, 197, 322, 2573, 279, 5925, 311, 4401, 3856, 369, 13694, 18422, 624, 6725, 69, 420...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestJob_Info exercises Job.Info against a mocked Salesforce HTTP session:
// the mock validates the request URL and method, returns a canned JSON job
// payload, and the test asserts the decoded Info matches field by field.
func TestJob_Info(t *testing.T) {
	type fields struct {
		session session.ServiceFormatter
		info    Response
	}
	tests := []struct {
		name    string
		fields  fields
		want    Info
		wantErr bool
	}{
		{
			name: "Passing",
			fields: fields{
				info: Response{
					ID: "1234",
				},
				session: &mockSessionFormatter{
					url: "https://test.salesforce.com",
					client: mockHTTPClient(func(req *http.Request) *http.Response {
						// Reject any URL other than the ingest endpoint for job 1234.
						if req.URL.String() != "https://test.salesforce.com/jobs/ingest/1234" {
							return &http.Response{
								StatusCode: 500,
								Status:     "Invalid URL",
								Body:       ioutil.NopCloser(strings.NewReader(req.URL.String())),
								Header:     make(http.Header),
							}
						}
						// Only GET is accepted.
						if req.Method != http.MethodGet {
							return &http.Response{
								StatusCode: 500,
								Status:     "Invalid Method",
								Body:       ioutil.NopCloser(strings.NewReader(req.Method)),
								Header:     make(http.Header),
							}
						}
						// Canned job payload returned on the happy path.
						resp := `{ "apiVersion": 44.0, "columnDelimiter": "COMMA", "concurrencyMode": "Parallel", "contentType": "CSV", "contentUrl": "services/v44.0/jobs", "createdById": "1234", "createdDate": "1/1/1970", "externalIdFieldName": "namename", "id": "9876", "jobType": "V2Ingest", "lineEnding": "LF", "object": "Account", "operation": "Insert", "state": "Open", "systemModstamp": "1/1/1980", "apexProcessingTime": 0, "apiActiveProcessingTime": 70, "numberRecordsFailed": 1, "numberRecordsProcessed": 1, "retries": 0, "totalProcessingTime": 105, "errorMessage": "" }`
						return &http.Response{
							StatusCode: http.StatusOK,
							Status:     "Good",
							Body:       ioutil.NopCloser(strings.NewReader(resp)),
							Header:     make(http.Header),
						}
					}),
				},
			},
			// Expected decoding of the canned payload above.
			want: Info{
				Response: Response{
					APIVersion:          44.0,
					ColumnDelimiter:     "COMMA",
					ConcurrencyMode:     "Parallel",
					ContentType:         "CSV",
					ContentURL:          "services/v44.0/jobs",
					CreatedByID:         "1234",
					CreatedDate:         "1/1/1970",
					ExternalIDFieldName: "namename",
					ID:                  "9876",
					JobType:             "V2Ingest",
					LineEnding:          "LF",
					Object:              "Account",
					Operation:           "Insert",
					State:               "Open",
					SystemModstamp:      "1/1/1980",
				},
				ApexProcessingTime:      0,
				APIActiveProcessingTime: 70,
				NumberRecordsFailed:     1,
				NumberRecordsProcessed:  1,
				Retries:                 0,
				TotalProcessingTime:     105,
				ErrorMessage:            "",
			},
			wantErr: false,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			j := &Job{
				session: tt.fields.session,
				info:    tt.fields.info,
			}
			got, err := j.Info()
			if (err != nil) != tt.wantErr {
				t.Errorf("Job.Info() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			if !reflect.DeepEqual(got, tt.want) {
				t.Errorf("Job.Info() = %v, want %v", got, tt.want)
			}
		})
	}
}
explode_data.jsonl/19883
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1718 }
[ 2830, 3393, 12245, 39624, 1155, 353, 8840, 836, 8, 341, 13158, 5043, 2036, 341, 197, 25054, 3797, 13860, 14183, 198, 197, 27043, 262, 5949, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 262, 914, 198, 197, 55276, 220, 5043, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestInvoiceChecksumMalleability encodes a freshly signed invoice, tampers
// with its bech32 checksum (replacing the trailing "p" with "qp"), and
// verifies Decode rejects the mutated string instead of recovering a
// different public key.
func TestInvoiceChecksumMalleability(t *testing.T) {
	// Fixed private key so the generated invoice is deterministic.
	privKeyHex := "7f9f2872307ba178b75434250da5fcac12e9d47fe47d90c1f0cb0641a450cff8"
	privKeyBytes, _ := hex.DecodeString(privKeyHex)
	chain := chaincfg.RegNetParams()
	var payHash [32]byte
	ts := time.Unix(0, 0)
	privKey := secp256k1.PrivKeyFromBytes(privKeyBytes)
	pubKey := privKey.PubKey()
	msgSigner := MessageSigner{
		SignCompact: func(hash []byte) ([]byte, error) {
			return ecdsa.SignCompact(privKey, hash, true), nil
		},
	}
	opts := []func(*Invoice){Description("test")}
	invoice, err := NewInvoice(chain, payHash, ts, opts...)
	if err != nil {
		t.Fatal(err)
	}
	encoded, err := invoice.Encode(msgSigner)
	if err != nil {
		t.Fatal(err)
	}
	t.Logf("encoded %s", encoded)
	t.Logf("pubkey %x", pubKey.SerializeCompressed())
	// Changing a bech32 string which checksum ends in "p" to "(q*)p" can
	// cause the checksum to return as a valid bech32 string _but_ the
	// signature field immediately preceding it would be mutated. In rare
	// cases (about 3%) it is still seen as a valid signature and public
	// key recovery causes a different node than the originally intended
	// one to be derived.
	//
	// We thus modify the checksum here and verify the invoice gets broken
	// enough that it fails to decode.
	if !strings.HasSuffix(encoded, "p") {
		t.Logf("Invoice: %s", encoded)
		t.Fatalf("Generated invoice checksum does not end in 'p'")
	}
	encoded = encoded[:len(encoded)-1] + "qp"
	_, err = Decode(encoded, chain)
	if err == nil {
		t.Fatalf("Did not get expected error when decoding invoice")
	}
}
explode_data.jsonl/40483
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 583 }
[ 2830, 3393, 34674, 73190, 44, 5054, 2897, 1155, 353, 8840, 836, 8, 341, 71170, 1592, 20335, 1669, 330, 22, 69, 24, 69, 17, 23, 22, 17, 18, 15, 22, 4645, 16, 22, 23, 65, 22, 20, 19, 18, 19, 17, 20, 15, 3235, 20, 8316, 580, 16...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestWorkspace_CopilotDirPath verifies CopilotDirPath's upward search for a
// "copilot" directory on an in-memory filesystem: it is found from the same
// level, the directory itself, and up to 5 levels below; deeper starts or a
// missing workspace yield an error; a preset path short-circuits the search.
func TestWorkspace_CopilotDirPath(t *testing.T) {
	// turn "test/copilot" into a platform-dependent path
	var manifestDir = filepath.FromSlash("test/copilot")
	testCases := map[string]struct {
		expectedManifestDir string
		presetManifestDir   string
		workingDir          string
		expectedError       error
		mockFileSystem      func(fs afero.Fs)
	}{
		"same directory level": {
			expectedManifestDir: manifestDir,
			workingDir:          filepath.FromSlash("test/"),
			mockFileSystem: func(fs afero.Fs) {
				fs.MkdirAll("test/copilot", 0755)
			},
		},
		"same directory": {
			expectedManifestDir: manifestDir,
			workingDir:          filepath.FromSlash("test/copilot"),
			mockFileSystem: func(fs afero.Fs) {
				fs.MkdirAll("test/copilot", 0755)
			},
		},
		"several levels deep": {
			expectedManifestDir: manifestDir,
			workingDir:          filepath.FromSlash("test/1/2/3/4"),
			mockFileSystem: func(fs afero.Fs) {
				fs.MkdirAll("test/copilot", 0755)
				fs.MkdirAll("test/1/2/3/4", 0755)
			},
		},
		"too many levels deep": {
			// 5 levels below the workspace root exceeds the search limit.
			expectedError: fmt.Errorf("couldn't find a directory called copilot up to 5 levels up from " + filepath.FromSlash("test/1/2/3/4/5")),
			workingDir:    filepath.FromSlash("test/1/2/3/4/5"),
			mockFileSystem: func(fs afero.Fs) {
				fs.MkdirAll("test/copilot", 0755)
				fs.MkdirAll("test/1/2/3/4/5", 0755)
			},
		},
		"out of a workspace": {
			expectedError: fmt.Errorf("couldn't find a directory called copilot up to 5 levels up from " + filepath.FromSlash("/")),
			workingDir:    filepath.FromSlash("/"),
			mockFileSystem: func(fs afero.Fs) {
				fs.MkdirAll("test/copilot", 0755)
			},
		},
		"uses precomputed manifest path": {
			// A preset copilotDir is returned without consulting the filesystem.
			expectedManifestDir: manifestDir,
			workingDir:          filepath.FromSlash("/"),
			mockFileSystem:      func(fs afero.Fs) {},
			presetManifestDir:   filepath.FromSlash("test/copilot"),
		},
	}
	for name, tc := range testCases {
		t.Run(name, func(t *testing.T) {
			// Create an empty FileSystem
			fs := afero.NewMemMapFs()
			// Set it up
			tc.mockFileSystem(fs)
			ws := Workspace{
				workingDir: tc.workingDir,
				fsUtils:    &afero.Afero{Fs: fs},
				copilotDir: tc.presetManifestDir,
			}
			manifestDirPath, err := ws.CopilotDirPath()
			if tc.expectedError == nil {
				require.NoError(t, err)
				require.Equal(t, tc.expectedManifestDir, manifestDirPath)
			} else {
				require.Equal(t, tc.expectedError.Error(), err.Error())
			}
		})
	}
}
explode_data.jsonl/30111
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1092 }
[ 2830, 3393, 45981, 920, 453, 23958, 6184, 1820, 1155, 353, 8840, 836, 8, 341, 197, 322, 2484, 330, 1944, 66659, 23958, 1, 1119, 264, 5339, 42818, 1815, 198, 2405, 14455, 6184, 284, 26054, 11439, 88004, 445, 1944, 66659, 23958, 5130, 181...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestParsePointMissingFieldValue(t *testing.T) { _, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=`) if err == nil { t.Errorf(`ParsePoints("%s") mismatch. got nil, exp error`, `cpu,host=serverA,region=us-west value=`) } _, err = models.ParsePointsString(`cpu,host=serverA,region=us-west value= 1000000000i`) if err == nil { t.Errorf(`ParsePoints("%s") mismatch. got nil, exp error`, `cpu,host=serverA,region=us-west value= 1000000000i`) } _, err = models.ParsePointsString(`cpu,host=serverA,region=us-west value=,value2=1i`) if err == nil { t.Errorf(`ParsePoints("%s") mismatch. got nil, exp error`, `cpu,host=serverA,region=us-west value=,value2=1i`) } _, err = models.ParsePointsString(`cpu,host=server01,region=us-west 1434055562000000000i`) if err == nil { t.Errorf(`ParsePoints("%s") mismatch. got nil, exp error`, `cpu,host=server01,region=us-west 1434055562000000000i`) } _, err = models.ParsePointsString(`cpu,host=server01,region=us-west value=1i,b`) if err == nil { t.Errorf(`ParsePoints("%s") mismatch. got nil, exp error`, `cpu,host=server01,region=us-west value=1i,b`) } _, err = models.ParsePointsString(`m f="blah"=123,r 1531703600000000000`) if err == nil { t.Errorf(`ParsePoints("%s") mismatch. got nil, exp error`, `m f="blah"=123,r 1531703600000000000`) } }
explode_data.jsonl/16905
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 542 }
[ 2830, 3393, 14463, 2609, 25080, 52112, 1155, 353, 8840, 836, 8, 341, 197, 6878, 1848, 1669, 4119, 8937, 11411, 703, 5809, 16475, 11, 3790, 28, 4030, 32, 11, 3943, 28, 355, 37602, 897, 22250, 340, 743, 1848, 621, 2092, 341, 197, 3244, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestRebalanceTooManyConsumers(t *testing.T) { t.Parallel() ctx := context.Background() conf := ReaderConfig{ Brokers: []string{"localhost:9092"}, GroupID: makeGroupID(), Topic: makeTopic(), MaxWait: time.Second, } // Create the first reader and wait for it to become the leader. r1 := NewReader(conf) prepareReader(t, ctx, r1, makeTestSequence(1)...) r1.ReadMessage(ctx) // Clear the stats from the first rebalance. r1.Stats() // Second reader should cause one rebalance for each r1 and r2. r2 := NewReader(conf) // Wait for rebalances. time.Sleep(5 * time.Second) // Before the fix, r2 would cause continuous rebalances, // as it tried to handshake() repeatedly. rebalances := r1.Stats().Rebalances + r2.Stats().Rebalances if rebalances > 2 { t.Errorf("unexpected rebalances to first reader, got %d", rebalances) } }
explode_data.jsonl/80378
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 303 }
[ 2830, 3393, 693, 21571, 31246, 8441, 41966, 388, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 20985, 1669, 2266, 19047, 741, 67850, 1669, 25166, 2648, 515, 197, 12791, 299, 26177, 25, 3056, 917, 4913, 8301, 25, 24, 15, 24, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestRuleForbiddenFunction(t *testing.T) { common.Log.Debug("Entering function: %s", common.GetFunctionName()) sqls := []string{ `CREATE FUNCTION hello (s CHAR(20));`, } for _, sql := range sqls { q, _ := NewQuery4Audit(sql) rule := q.RuleForbiddenFunction() if rule.Item != "FUN.009" { t.Error("Rule not match:", rule.Item, "Expect : FUN.009") } } common.Log.Debug("Exiting function: %s", common.GetFunctionName()) }
explode_data.jsonl/76800
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 170 }
[ 2830, 3393, 11337, 69115, 5152, 1155, 353, 8840, 836, 8, 341, 83825, 5247, 20345, 445, 82867, 729, 25, 1018, 82, 497, 4185, 2234, 5152, 675, 2398, 30633, 82, 1669, 3056, 917, 515, 197, 197, 63, 22599, 24819, 23811, 320, 82, 23997, 7, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestWaiting_ExecuteInbound(t *testing.T) { followup, _, err := (&waiting{}).ExecuteInbound(nil, &metaData{}) require.NoError(t, err) require.Equal(t, &noOp{}, followup) }
explode_data.jsonl/66255
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 74 }
[ 2830, 3393, 42104, 83453, 641, 10891, 1155, 353, 8840, 836, 8, 341, 1166, 1544, 454, 11, 8358, 1848, 1669, 15899, 49534, 6257, 568, 17174, 641, 10891, 27907, 11, 609, 5490, 1043, 37790, 17957, 35699, 1155, 11, 1848, 340, 17957, 12808, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestIDF1(t *testing.T) { df := NewDocumentFrequency() term := "car" vocab := map[string]int{ term: 1, } df.AddVocabulary(vocab) _, ok := df.IDF(term) okExpected := true if ok != okExpected { t.Error("index.TestIDF1: okExpected ", okExpected, " got ", ok) } }
explode_data.jsonl/9829
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 127 }
[ 2830, 3393, 915, 37, 16, 1155, 353, 8840, 836, 8, 341, 85187, 1669, 1532, 7524, 38614, 741, 197, 4991, 1669, 330, 6918, 698, 5195, 20497, 1669, 2415, 14032, 63025, 515, 197, 197, 4991, 25, 220, 16, 345, 197, 532, 85187, 1904, 53, 43...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestTerminatorTimeout drives the connection terminator via an injected
// flush channel: two pending requests are allocated, the terminator is fed a
// tick past the first request's deadline, and the test asserts the first
// request fails with errTimeout, the second with errReaderBroken, and the
// underlying test conn is closed (n.c).
func TestTerminatorTimeout(t *testing.T) {
	// ch stands in for the terminator's periodic flush ticker.
	ch := make(chan time.Time)
	c := NewClient().(*client)
	c.opts.termFlushCh = ch
	n := newTestTerminatorConn()
	conn := c.newConnection(n)
	conn.w.Add(1)
	go conn.terminator()
	// First pending request; its get() should fail with errTimeout below.
	i1, p1 := conn.p.alloc()
	var err1 error
	conn.w.Add(1)
	go func() {
		defer conn.w.Done()
		defer conn.p.free(i1)
		_, err1 = p1.get()
	}()
	// Second pending request; expected to observe errReaderBroken.
	i2, p2 := conn.p.alloc()
	var err2 error
	conn.w.Add(1)
	go func() {
		defer conn.w.Done()
		defer conn.p.free(i2)
		_, err2 = p2.get()
	}()
	// Pick a tick two timeouts past p1's timestamp and stamp p2 with it, so
	// only p1 is overdue when the terminator processes the tick.
	next := time.Unix(0, *p1.t).Add(2 * defTimeout)
	*p2.t = next.UnixNano()
	ch <- next
	// Stop the terminator and wait for all goroutines to finish.
	close(conn.t)
	conn.w.Wait()
	assert.True(t, n.c)
	assert.Equal(t, errTimeout, err1)
	assert.Equal(t, errReaderBroken, err2)
}
explode_data.jsonl/17385
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 346 }
[ 2830, 3393, 21209, 1065, 850, 7636, 1155, 353, 8840, 836, 8, 341, 23049, 1669, 1281, 35190, 882, 16299, 340, 1444, 1669, 1532, 2959, 1005, 4071, 2972, 340, 1444, 56022, 47770, 46874, 1143, 284, 521, 271, 9038, 1669, 501, 2271, 21209, 10...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestArea(t *testing.T) { rectangle := Rectangle{10.0, 5.0} got := Area(rectangle) want := 50.0 if got != want { t.Errorf("got %.2f want %.2f", got, want) } }
explode_data.jsonl/60536
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 79 }
[ 2830, 3393, 8726, 1155, 353, 8840, 836, 8, 220, 341, 197, 55316, 1669, 19280, 90, 16, 15, 13, 15, 11, 220, 20, 13, 15, 532, 3174, 354, 1669, 12030, 30354, 4044, 340, 50780, 1669, 220, 20, 15, 13, 15, 271, 743, 2684, 961, 1366, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestCleanup(t *testing.T) { cleanedup1 := false cleanedup2 := false cleanup1 := func() DError { cleanedup1 = true return nil } cleanup2 := func() DError { cleanedup2 = true return nil } cleanupFail := func() DError { return Errf("failed cleanup") } w := testWorkflow() w.addCleanupHook(cleanup1) w.addCleanupHook(cleanupFail) w.addCleanupHook(cleanup2) w.cleanup() if !cleanedup1 { t.Error("cleanup1 was not run") } if !cleanedup2 { t.Error("cleanup2 was not run") } }
explode_data.jsonl/3863
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 222 }
[ 2830, 3393, 67335, 1155, 353, 8840, 836, 8, 341, 1444, 2675, 291, 454, 16, 1669, 895, 198, 1444, 2675, 291, 454, 17, 1669, 895, 198, 1444, 60639, 16, 1669, 2915, 368, 422, 1454, 341, 197, 1444, 2675, 291, 454, 16, 284, 830, 198, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCreateArchiveBadConfig(t *testing.T) { common.SetupConfig("") zipFilePath := getArchivePath() filePath, err := createArchive(zipFilePath, true, SearchPaths{}, "") assert.Nil(t, err) assert.Equal(t, zipFilePath, filePath) if _, err := os.Stat(zipFilePath); os.IsNotExist(err) { assert.Fail(t, "The Zip File was not created") } else { os.Remove(zipFilePath) } }
explode_data.jsonl/18212
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 148 }
[ 2830, 3393, 4021, 42502, 17082, 2648, 1155, 353, 8840, 836, 8, 341, 83825, 39820, 2648, 31764, 197, 9964, 19090, 1669, 633, 42502, 1820, 741, 17661, 1820, 11, 1848, 1669, 1855, 42502, 38249, 19090, 11, 830, 11, 7542, 26901, 22655, 85617, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestStackTraceRequest exercises the DAP stackTrace request against the
// "increment" fixture: at the first breakpoint it issues paged/partial
// stack requests with various startFrame/levels combinations (including
// zero, negative, and out-of-bounds values) and checks the returned frames
// and ids; at the second breakpoint it confirms frame ids reset.
func TestStackTraceRequest(t *testing.T) {
	runTest(t, "increment", func(client *daptest.Client, fixture protest.Fixture) {
		var stResp *dap.StackTraceResponse
		runDebugSessionWithBPs(t, client, "launch",
			// Launch
			func() {
				client.LaunchRequest("exec", fixture.Path, !stopOnEntry)
			},
			// Set breakpoints
			fixture.Source, []int{8, 18},
			[]onBreakpoint{{
				// Stop at line 8
				execute: func() {
					// Even though the stack frames do not change,
					// repeated requests at the same breakpoint
					// would assign next block of unique ids to them each time.
					const NumFrames = 6
					reqIndex := -1
					// frameID computes the id a frame receives on the
					// (reqIndex+1)-th request at this breakpoint.
					frameID := func(frameIndex int) int {
						reqIndex++
						return startHandle + NumFrames*reqIndex + frameIndex
					}
					tests := map[string]struct {
						startFrame          int
						levels              int
						wantStartName       string
						wantStartLine       int
						wantStartFrame      int
						wantFramesReturned  int
						wantFramesAvailable int
					}{
						"all frame levels from 0 to NumFrames":    {0, NumFrames, "main.Increment", 8, 0, NumFrames, NumFrames},
						"subset of frames from 1 to -1":           {1, NumFrames - 1, "main.Increment", 11, 1, NumFrames - 1, NumFrames},
						"load stack in pages: first half":         {0, NumFrames / 2, "main.Increment", 8, 0, NumFrames / 2, NumFrames},
						"load stack in pages: second half":        {NumFrames / 2, NumFrames, "main.main", 17, NumFrames / 2, NumFrames / 2, NumFrames},
						"zero levels means all levels":            {0, 0, "main.Increment", 8, 0, NumFrames, NumFrames},
						"zero levels means all remaining levels":  {NumFrames / 2, 0, "main.main", 17, NumFrames / 2, NumFrames / 2, NumFrames},
						"negative levels treated as 0 (all)":      {0, -10, "main.Increment", 8, 0, NumFrames, NumFrames},
						"OOB levels is capped at available len":   {0, NumFrames + 1, "main.Increment", 8, 0, NumFrames, NumFrames},
						"OOB levels is capped at available len 1": {1, NumFrames + 1, "main.Increment", 11, 1, NumFrames - 1, NumFrames},
						"negative startFrame treated as 0":        {-10, 0, "main.Increment", 8, 0, NumFrames, NumFrames},
						"OOB startFrame returns empty trace":      {NumFrames, 0, "main.Increment", -1, -1, 0, NumFrames},
					}
					for name, tc := range tests {
						client.StackTraceRequest(1, tc.startFrame, tc.levels)
						stResp = client.ExpectStackTraceResponse(t)
						checkStackFramesNamed(name, t, stResp,
							tc.wantStartName, tc.wantStartLine, frameID(tc.wantStartFrame), tc.wantFramesReturned, tc.wantFramesAvailable)
					}
				},
				disconnect: false,
			}, {
				// Stop at line 18
				execute: func() {
					// Frame ids get reset at each breakpoint.
					client.StackTraceRequest(1, 0, 0)
					stResp = client.ExpectStackTraceResponse(t)
					checkStackFrames(t, stResp, "main.main", 18, startHandle, 3, 3)
				},
				disconnect: false,
			}})
	})
}
explode_data.jsonl/17315
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1223 }
[ 2830, 3393, 7657, 1900, 1155, 353, 8840, 836, 8, 341, 56742, 2271, 1155, 11, 330, 35744, 497, 2915, 12805, 353, 91294, 1944, 11716, 11, 12507, 8665, 991, 12735, 8, 341, 197, 2405, 357, 36555, 353, 91294, 73179, 2582, 198, 197, 56742, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEncryptAndDecryptKey(t *testing.T) { assert := assert.New(t) kek, err := models.ParseKeyEncryptionKey("40:E68256A8F9685DA6BE7C5FE461C0838E0751ED547E9A824B2BDA9E2580703643") assert.NoError(err) otherKek, err := models.ParseKeyEncryptionKey("12:E68256A8F9685DA6BE7C5FE461C0838E0751ED547E9A824B2BDA9E2580703643") assert.NoError(err) wrongKek, err := models.ParseKeyEncryptionKey("40:0D17FE0FDA5F1CE46307561714C6938FDFE9408BF6712BE49D3FC4C757D0E62E") assert.NoError(err) key, err := crypto.GenerateAESKey() assert.NoError(err) dek, err := kek.Encrypt(key) assert.NoError(err) assert.Equal(kek.ID, dek.KeyEncryptionKeyID) _, err = otherKek.Decrypt(dek) assert.Error(err) _, err = wrongKek.Decrypt(dek) assert.Error(err) plaintext, err := kek.Decrypt(dek) assert.NoError(err) assert.Equal(hex.EncodeToString(key), hex.EncodeToString(plaintext)) }
explode_data.jsonl/16333
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 403 }
[ 2830, 3393, 61520, 3036, 89660, 1592, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 340, 197, 85207, 11, 1848, 1669, 4119, 8937, 1592, 79239, 1592, 445, 19, 15, 84488, 21, 23, 17, 20, 21, 32, 23, 37, 24, 21, 23, 20, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestReplaceWithOnlyEmoticon checks translate's emoticon replacement: an
// emoticon is replaced only when it stands alone as a token (start/end of
// string, surrounded by whitespace or newlines), never when embedded inside
// a word ("XDaaa", "aaa</3", "aaaaaaXDbbbbb" stay untouched).
func TestReplaceWithOnlyEmoticon(t *testing.T) {
	var testMatches = map[string]match{
		"XD": match{
			replacement: "laughing",
			regexp:      getEmoticonRegexp("XD"),
		},
		"</3": match{
			replacement: "broken_heart",
			regexp:      getEmoticonRegexp("</3"),
		},
		"8)": match{
			replacement: "sunglasses",
			regexp:      getEmoticonRegexp("8)"),
		},
	}
	// Emoticon as the entire input.
	input := "XD"
	res := translate(input, testMatches)
	assert.Equal(t, ":laughing:", res)
	// Surrounding whitespace is preserved.
	input = " XD \t"
	res = translate(input, testMatches)
	assert.Equal(t, " :laughing: \t", res)
	// Emoticon at the start / end of a sentence.
	input = "XD aaaaaa"
	res = translate(input, testMatches)
	assert.Equal(t, ":laughing: aaaaaa", res)
	input = "aaaaaa XD"
	res = translate(input, testMatches)
	assert.Equal(t, "aaaaaa :laughing:", res)
	// Multi-line input; embedded occurrences ("aaaaaaXDbbbbb") are kept.
	input = "XD\nXD\n XD \n XD\n\aaa XD bbbb\naaa XD\nXD bbbbb\naaaaaaXDbbbbb"
	res = translate(input, testMatches)
	assert.Equal(t, ":laughing:\n:laughing:\n :laughing: \n :laughing:\n\aaa :laughing: bbbb\naaa :laughing:\n:laughing: bbbbb\naaaaaaXDbbbbb", res)
	// Several distinct emoticons mixed with embedded non-matches.
	input = "aaaaaa XD </3 XDaaa aaa</3 XD XD 8)"
	res = translate(input, testMatches)
	assert.Equal(t, "aaaaaa :laughing: :broken_heart: XDaaa aaa</3 :laughing: :laughing: :sunglasses:", res)
}
explode_data.jsonl/35688
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 592 }
[ 2830, 3393, 23107, 2354, 7308, 2269, 354, 1924, 1155, 353, 8840, 836, 8, 972, 2405, 1273, 42470, 284, 2415, 14032, 60, 6347, 1666, 197, 197, 1, 67677, 788, 2432, 1666, 298, 17200, 16101, 25, 330, 4260, 7443, 287, 4723, 298, 37013, 458...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDescribeLoadBalancerOnEnsure(t *testing.T) { awsServices := NewFakeAWSServices() c, _ := newAWSCloud(strings.NewReader("[global]"), awsServices) awsServices.elb.expectDescribeLoadBalancers("aid") c.EnsureLoadBalancer(TestClusterName, &v1.Service{ObjectMeta: metav1.ObjectMeta{Name: "myservice", UID: "id"}}, []*v1.Node{}) }
explode_data.jsonl/12860
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 126 }
[ 2830, 3393, 74785, 5879, 93825, 1925, 64439, 1155, 353, 8840, 836, 8, 341, 197, 8635, 11025, 1669, 1532, 52317, 14419, 1220, 2161, 741, 1444, 11, 716, 1669, 501, 14419, 3540, 52178, 51442, 68587, 10937, 9752, 60, 3975, 31521, 11025, 340, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPositiveBillUser(t *testing.T) { check := assert.New(t) // disable transport swap for http mock gorequest.DisableTransportSwap = true // create request handler for mocking requestHandler := utils.NewRequestHandler("balance_api") httpmock.ActivateNonDefault(requestHandler.Handler.Client) defer httpmock.DeactivateAndReset() // prepare bill event billEvent := getBillingEvent() // mock http request httpmock.RegisterResponder("POST", "https://balance-svc-dev.com/Billing/1", httpmock.NewStringResponder(200, responseBody)) // call balance api balanceRequestHandler := getBalanceHandler(requestHandler) response, isSuccessful := balanceRequestHandler.BillUser(billEvent) if isSuccessful { check.Equal(responseBody, string(response)) } // get the amount of calls for the registered responder info := httpmock.GetCallCountInfo() check.Equal(1, info["POST https://balance-svc-dev.com/Billing/1"]) }
explode_data.jsonl/70026
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 289 }
[ 2830, 3393, 35490, 27476, 1474, 1155, 353, 8840, 836, 8, 1476, 25157, 1669, 2060, 7121, 1155, 692, 197, 322, 11156, 7557, 14291, 369, 1758, 7860, 198, 3174, 460, 719, 10166, 480, 27560, 46179, 284, 830, 271, 197, 322, 1855, 1681, 7013, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestParseEnviron(t *testing.T) { for i, tt := range envParseTests { results := parseEnviron(tt.Env) if !reflect.DeepEqual(tt.Expected, results) { t.Errorf("%d: Expected: %#v Got: %#v", i, tt.Expected, results) } } }
explode_data.jsonl/63439
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 105 }
[ 2830, 3393, 14463, 1702, 2772, 1155, 353, 8840, 836, 8, 341, 2023, 600, 11, 17853, 1669, 2088, 6105, 14463, 18200, 341, 197, 55497, 1669, 4715, 1702, 2772, 47152, 81214, 340, 197, 743, 753, 34913, 94750, 47152, 5121, 4046, 11, 3059, 8, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestJetStreamSubscribe_DeliverPolicy(t *testing.T) { s := RunBasicJetStreamServer() defer s.Shutdown() if config := s.JetStreamConfig(); config != nil { defer os.RemoveAll(config.StoreDir) } nc, err := nats.Connect(s.ClientURL()) if err != nil { t.Fatalf("Unexpected error: %v", err) } defer nc.Close() js, err := nc.JetStream() if err != nil { t.Fatalf("Unexpected error: %v", err) } // Create the stream using our client API. _, err = js.AddStream(&nats.StreamConfig{ Name: "TEST", Subjects: []string{"foo", "bar"}, }) if err != nil { t.Fatalf("Unexpected error: %v", err) } var publishTime time.Time for i := 0; i < 10; i++ { payload := fmt.Sprintf("i:%d", i) if i == 5 { publishTime = time.Now() } js.Publish("foo", []byte(payload)) } for _, test := range []struct { name string subopt nats.SubOpt expected int }{ { "deliver.all", nats.DeliverAll(), 10, }, { "deliver.last", nats.DeliverLast(), 1, }, { "deliver.new", nats.DeliverNew(), 0, }, { "deliver.starttime", nats.StartTime(publishTime), 5, }, { "deliver.startseq", nats.StartSequence(6), 5, }, } { test := test t.Run(test.name, func(t *testing.T) { ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second) defer cancel() got := 0 sub, err := js.Subscribe("foo", func(m *nats.Msg) { got++ if got == test.expected { cancel() } }, test.subopt) if err != nil { t.Fatalf("Unexpected error: %v", err) } <-ctx.Done() sub.Drain() if got != test.expected { t.Fatalf("Expected %d, got %d", test.expected, got) } }) } js.Publish("bar", []byte("bar msg 1")) js.Publish("bar", []byte("bar msg 2")) sub, err := js.SubscribeSync("bar", nats.BindStream("TEST"), nats.DeliverLastPerSubject()) if err != nil { t.Fatalf("Error on subscribe: %v", err) } msg, err := sub.NextMsg(time.Second) if err != nil { t.Fatalf("Error on next msg: %v", err) } if string(msg.Data) != "bar msg 2" { t.Fatalf("Unexepcted last message: %q", msg.Data) } }
explode_data.jsonl/29170
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 923 }
[ 2830, 3393, 35641, 3027, 28573, 1557, 301, 1524, 13825, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 6452, 15944, 35641, 3027, 5475, 741, 16867, 274, 10849, 18452, 2822, 743, 2193, 1669, 274, 3503, 295, 3027, 2648, 2129, 2193, 961, 2092, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestEnvSource_GetValue(t *testing.T) { t.Run("get existed envars", func(t *testing.T) { setupConfigTest(t) var key, value string key = "TEST_OPTION1_OPTION2" value = "test_option1_option2" opt := "test.option1.option2" createTestEnvVars(t, key, value) v, err := testSource.GetValue(opt) defaultAssert.Equal(v, value) defaultAssert.Nil(err) }) t.Run("get nil envars", func(t *testing.T) { setupConfigTest(t) var key, value string key = "TEST_NIL" value = "" opt := "test.nil" createTestEnvVars(t, key, value) v, err := testSource.GetValue(opt) defaultAssert.Nil(v) defaultAssert.ErrorIs(err, ErrEmptyValue) }) t.Run("test regex with uncorrect envars", func(t *testing.T) { setupConfigTest(t) var key, value string key = "TEST_FORMAT1" value = "test_format1" opt := []string{"test.format1+format2", "test-format1.format2", "test&format1"} createTestEnvVars(t, key, value) for _, val := range opt { v, err := testSource.GetValue(val) defaultAssert.NotEqual(v, value) defaultAssert.ErrorIs(err, ErrInvalidFormat) } }) }
explode_data.jsonl/60667
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 465 }
[ 2830, 3393, 14359, 3608, 13614, 1130, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 455, 24295, 662, 15380, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 84571, 2648, 2271, 1155, 340, 197, 2405, 1376, 11, 897, 914, 198, 197, 23634, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestQueryExecutor(t *testing.T) { tests := []queryExecTestCase{ buildSimpleSuccessTestCase(t), buildPlannerErrorTestCase(t), buildPlannerErrorTestCaseStatusMessage(t), buildPlanInvalidUUIDTestCase(t), buildConsumeErrorTestCase(t), buildStreamResultErrorTestCase(t), buildResumeQueryTestCase(t), buildResumeQueryBadQueryIDTestCase(t), buildMutationFailedQueryTestCase(t), } for _, test := range tests { t.Run(test.Name, func(t *testing.T) { runTestCase(t, &test) }) } }
explode_data.jsonl/41512
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 186 }
[ 2830, 3393, 2859, 25255, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1631, 10216, 16458, 515, 197, 69371, 16374, 7188, 16458, 1155, 1326, 197, 69371, 2120, 4887, 1454, 16458, 1155, 1326, 197, 69371, 2120, 4887, 1454, 16458, 2522, 205...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_singleFileNotRotate(t *testing.T) { fileName := os.TempDir() + "/test.singleFile" metaDir := os.TempDir() + "/rotates" //create file & write file CreateFile(fileName, "12345") defer DeleteFile(fileName) //create sf meta, err := NewMeta(metaDir, metaDir, testlogpath, ModeFile, "", DefautFileRetention) if err != nil { t.Error(err) } sf, err := NewSingleFile(meta, fileName, WhenceOldest, false) if err != nil { t.Error(err) } oldInode, err := utilsos.GetIdentifyIDByFile(sf.f) assert.NoError(t, err) //read file 正常读 p := make([]byte, 5) n, err := sf.Read(p) if err != nil { t.Error(err) } assert.Equal(t, 5, n) assert.Equal(t, "12345", string(p)) //应该遇到EOF,pfi没有被更新 n, err = sf.Read(p) assert.Equal(t, io.EOF, err) newInode, err := utilsos.GetIdentifyIDByFile(sf.f) assert.NoError(t, err) assert.Equal(t, newInode, oldInode) //append文件 appendTestFile(fileName, "67890") n, err = sf.Read(p) if err != nil { t.Error(err) } assert.Equal(t, 5, n) assert.Equal(t, "67890", string(p)) }
explode_data.jsonl/6870
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 487 }
[ 2830, 3393, 19487, 1703, 2623, 34540, 1155, 353, 8840, 836, 8, 341, 17661, 675, 1669, 2643, 65009, 6184, 368, 488, 3521, 1944, 32301, 1703, 698, 84004, 6184, 1669, 2643, 65009, 6184, 368, 488, 3521, 4640, 973, 1837, 197, 322, 3182, 1034...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestArray_Sort(t *testing.T) { gtest.C(t, func(t *gtest.T) { expect1 := []interface{}{0, 1, 2, 3} expect2 := []interface{}{3, 2, 1, 0} array := garray.NewArray() for i := 3; i >= 0; i-- { array.Append(i) } array.SortFunc(func(v1, v2 interface{}) bool { return v1.(int) < v2.(int) }) t.Assert(array.Slice(), expect1) array.SortFunc(func(v1, v2 interface{}) bool { return v1.(int) > v2.(int) }) t.Assert(array.Slice(), expect2) }) }
explode_data.jsonl/13889
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 230 }
[ 2830, 3393, 1857, 1098, 371, 1155, 353, 8840, 836, 8, 341, 3174, 1944, 727, 1155, 11, 2915, 1155, 353, 82038, 836, 8, 341, 197, 24952, 16, 1669, 3056, 4970, 6257, 90, 15, 11, 220, 16, 11, 220, 17, 11, 220, 18, 532, 197, 24952, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestS3Volume_Mount(t *testing.T) { s3driver := S3Volume{ Endpoint: "localhost:9000", Bucket: "shadowtest", AccessKey: "3D2U2V66A3CP0CB088Z3", SecretKey: "Uipi4szPTGhyjoTFsmtXJrIf9cbqnfLRPQH6e8Ho", SSL: false, } // Create the destination directory os.MkdirAll("/tmp/shadowtest/mnt", 0755) // Set the S3 environment to do the test client := s3driver.GetMinioClient() client.MakeBucket("shadowtest", "") file, err := os.OpenFile("../../contrib/s3_test_archive.zip", os.O_RDONLY, 0) assert.Nil(t, err) stat, err := file.Stat() assert.Nil(t, err) _, err = client.PutObject("shadowtest", "s3_test_archive.zip", file, stat.Size(), minio.PutObjectOptions{}) assert.Nil(t, err) // Test mounting s3driver.Mount("s3_test_archive.zip", "/tmp/shadowtest/mnt") content, err := ioutil.ReadFile("/tmp/shadowtest/mnt/README.md") assert.Nil(t, err) assert.Equal(t, []byte("The S3 driver is working.\n"), content) // Test umounting s3driver.Umount("/tmp/shadowtest/mnt") _, err = ioutil.ReadFile("/tmp/shadowtest/mnt/README.md") assert.NotNil(t, err) }
explode_data.jsonl/39064
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 468 }
[ 2830, 3393, 50, 18, 18902, 1245, 629, 1155, 353, 8840, 836, 8, 341, 1903, 18, 12521, 1669, 328, 18, 18902, 515, 197, 197, 27380, 25, 220, 330, 8301, 25, 24, 15, 15, 15, 756, 197, 12791, 11152, 25, 262, 330, 32952, 1944, 756, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestWait(t *testing.T) { t.Parallel() ctx, cancel := context.WithCancel(context.Background()) go func() { time.Sleep(100 * time.Millisecond) cancel() }() assert.NoError(t, wait(ctx, 10*time.Millisecond)) }
explode_data.jsonl/82066
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 88 }
[ 2830, 3393, 14190, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 20985, 11, 9121, 1669, 2266, 26124, 9269, 5378, 19047, 2398, 30680, 2915, 368, 341, 197, 21957, 31586, 7, 16, 15, 15, 353, 882, 71482, 340, 197, 84441, 741, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestGetUncheckedCertificates(t *testing.T) { wildcardMap := make(map[string]*tls.Certificate) wildcardMap["*.traefik.wtf"] = &tls.Certificate{} wildcardSafe := &safe.Safe{} wildcardSafe.Set(wildcardMap) domainMap := make(map[string]*tls.Certificate) domainMap["traefik.wtf"] = &tls.Certificate{} domainSafe := &safe.Safe{} domainSafe.Set(domainMap) // FIXME Add a test for DefaultCertificate testCases := []struct { desc string dynamicCerts *safe.Safe resolvingDomains map[string]struct{} acmeCertificates []*Certificate domains []string expectedDomains []string }{ { desc: "wildcard to generate", domains: []string{"*.traefik.wtf"}, expectedDomains: []string{"*.traefik.wtf"}, }, { desc: "wildcard already exists in dynamic certificates", domains: []string{"*.traefik.wtf"}, dynamicCerts: wildcardSafe, expectedDomains: nil, }, { desc: "wildcard already exists in ACME certificates", domains: []string{"*.traefik.wtf"}, acmeCertificates: []*Certificate{ { Domain: types.Domain{Main: "*.traefik.wtf"}, }, }, expectedDomains: nil, }, { desc: "domain CN and SANs to generate", domains: []string{"traefik.wtf", "foo.traefik.wtf"}, expectedDomains: []string{"traefik.wtf", "foo.traefik.wtf"}, }, { desc: "domain CN already exists in dynamic certificates and SANs to generate", domains: []string{"traefik.wtf", "foo.traefik.wtf"}, dynamicCerts: domainSafe, expectedDomains: []string{"foo.traefik.wtf"}, }, { desc: "domain CN already exists in ACME certificates and SANs to generate", domains: []string{"traefik.wtf", "foo.traefik.wtf"}, acmeCertificates: []*Certificate{ { Domain: types.Domain{Main: "traefik.wtf"}, }, }, expectedDomains: []string{"foo.traefik.wtf"}, }, { desc: "domain already exists in dynamic certificates", domains: []string{"traefik.wtf"}, dynamicCerts: domainSafe, expectedDomains: nil, }, { desc: "domain already exists in ACME certificates", domains: []string{"traefik.wtf"}, acmeCertificates: []*Certificate{ { Domain: types.Domain{Main: "traefik.wtf"}, }, }, 
expectedDomains: nil, }, { desc: "domain matched by wildcard in dynamic certificates", domains: []string{"who.traefik.wtf", "foo.traefik.wtf"}, dynamicCerts: wildcardSafe, expectedDomains: nil, }, { desc: "domain matched by wildcard in ACME certificates", domains: []string{"who.traefik.wtf", "foo.traefik.wtf"}, acmeCertificates: []*Certificate{ { Domain: types.Domain{Main: "*.traefik.wtf"}, }, }, expectedDomains: nil, }, { desc: "root domain with wildcard in ACME certificates", domains: []string{"traefik.wtf", "foo.traefik.wtf"}, acmeCertificates: []*Certificate{ { Domain: types.Domain{Main: "*.traefik.wtf"}, }, }, expectedDomains: []string{"traefik.wtf"}, }, { desc: "all domains already managed by ACME", domains: []string{"traefik.wtf", "foo.traefik.wtf"}, resolvingDomains: map[string]struct{}{ "traefik.wtf": {}, "foo.traefik.wtf": {}, }, expectedDomains: []string{}, }, { desc: "one domain already managed by ACME", domains: []string{"traefik.wtf", "foo.traefik.wtf"}, resolvingDomains: map[string]struct{}{ "traefik.wtf": {}, }, expectedDomains: []string{"foo.traefik.wtf"}, }, { desc: "wildcard domain already managed by ACME checks the domains", domains: []string{"bar.traefik.wtf", "foo.traefik.wtf"}, resolvingDomains: map[string]struct{}{ "*.traefik.wtf": {}, }, expectedDomains: []string{}, }, { desc: "wildcard domain already managed by ACME checks domains and another domain checks one other domain, one domain still unchecked", domains: []string{"traefik.wtf", "bar.traefik.wtf", "foo.traefik.wtf", "acme.wtf"}, resolvingDomains: map[string]struct{}{ "*.traefik.wtf": {}, "traefik.wtf": {}, }, expectedDomains: []string{"acme.wtf"}, }, } for _, test := range testCases { test := test t.Run(test.desc, func(t *testing.T) { t.Parallel() if test.resolvingDomains == nil { test.resolvingDomains = make(map[string]struct{}) } acmeProvider := Provider{ certificateStore: &traefiktls.CertificateStore{ DynamicCerts: test.dynamicCerts, }, certificates: test.acmeCertificates, 
resolvingDomains: test.resolvingDomains, } domains := acmeProvider.getUncheckedDomains(context.Background(), test.domains, false) assert.Equal(t, len(test.expectedDomains), len(domains), "Unexpected domains.") }) } }
explode_data.jsonl/20639
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2145 }
[ 2830, 3393, 1949, 96498, 97140, 1155, 353, 8840, 836, 8, 341, 6692, 695, 4951, 2227, 1669, 1281, 9147, 14032, 8465, 34488, 727, 20962, 340, 6692, 695, 4951, 2227, 1183, 19922, 2172, 823, 1579, 1418, 8935, 1341, 284, 609, 34488, 727, 209...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestQueryVotingInfo(t *testing.T) { cdc := codec.New() input := CreateTestInput(t) querier := NewQuerier(input.OracleKeeper) votingInfo := types.NewVotingInfo(ValAddrs[0], 7, 1, 32) input.OracleKeeper.SetVotingInfo(input.Ctx, ValAddrs[0], votingInfo) queryParams := types.NewQueryVotingInfoParams(ValAddrs[0]) bz, err := cdc.MarshalJSON(queryParams) require.NoError(t, err) req := abci.RequestQuery{ Path: "", Data: bz, } res, err := querier(input.Ctx, []string{types.QueryVotingInfo}, req) require.NoError(t, err) var resVotingInfo types.VotingInfo cdc.UnmarshalJSON(res, &resVotingInfo) require.Equal(t, votingInfo, resVotingInfo) }
explode_data.jsonl/47471
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 267 }
[ 2830, 3393, 2859, 53, 11519, 1731, 1155, 353, 8840, 836, 8, 341, 1444, 7628, 1669, 34647, 7121, 741, 22427, 1669, 4230, 2271, 2505, 1155, 340, 197, 15959, 1268, 1669, 1532, 2183, 261, 1268, 5384, 13, 48663, 77233, 692, 5195, 11519, 1731...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDescribeBuildSpec(t *testing.T) { tests := []struct { spec buildv1.BuildSpec want string }{ { spec: buildv1.BuildSpec{ CommonSpec: buildv1.CommonSpec{ Source: buildv1.BuildSource{ Git: &buildv1.GitBuildSource{ URI: "http://github.com/my/repository", }, ContextDir: "context", }, Strategy: buildv1.BuildStrategy{ DockerStrategy: &buildv1.DockerBuildStrategy{}, }, Output: buildv1.BuildOutput{ To: &corev1.ObjectReference{ Kind: "DockerImage", Name: "repository/data", }, }, }, }, want: "URL", }, { spec: buildv1.BuildSpec{ CommonSpec: buildv1.CommonSpec{ Source: buildv1.BuildSource{}, Strategy: buildv1.BuildStrategy{ SourceStrategy: &buildv1.SourceBuildStrategy{ From: corev1.ObjectReference{ Kind: "DockerImage", Name: "myimage:tag", }, }, }, Output: buildv1.BuildOutput{ To: &corev1.ObjectReference{ Kind: "DockerImage", Name: "repository/data", }, }, }, }, want: "Empty Source", }, { spec: buildv1.BuildSpec{ CommonSpec: buildv1.CommonSpec{ Source: buildv1.BuildSource{}, Strategy: buildv1.BuildStrategy{ CustomStrategy: &buildv1.CustomBuildStrategy{ From: corev1.ObjectReference{ Kind: "DockerImage", Name: "myimage:tag", }, }, }, Output: buildv1.BuildOutput{ To: &corev1.ObjectReference{ Kind: "DockerImage", Name: "repository/data", }, }, }, }, want: "Empty Source", }, { spec: buildv1.BuildSpec{ CommonSpec: buildv1.CommonSpec{ Source: buildv1.BuildSource{}, Strategy: buildv1.BuildStrategy{ JenkinsPipelineStrategy: &buildv1.JenkinsPipelineBuildStrategy{ Jenkinsfile: "openshiftBuild", }, }, }, }, want: "openshiftBuild", }, } for _, tt := range tests { var b bytes.Buffer out := tabwriter.NewWriter(&b, 0, 8, 0, '\t', 0) describeCommonSpec(tt.spec.CommonSpec, out) if err := out.Flush(); err != nil { t.Fatalf("%+v: flush error: %v", tt.spec, err) } if got := b.String(); !strings.Contains(got, tt.want) { t.Errorf("describeBuildSpec(%+v, out) = %q, should contain %q", tt.spec, got, tt.want) } } }
explode_data.jsonl/6487
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1171 }
[ 2830, 3393, 74785, 11066, 8327, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 98100, 1936, 85, 16, 25212, 8327, 198, 197, 50780, 914, 198, 197, 59403, 197, 197, 515, 298, 98100, 25, 1936, 85, 16, 25212, 8327, 515, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestByteaArrayScanNil(t *testing.T) { arr := ByteaArray{{2}, {6}, {0, 0}} err := arr.Scan(nil) if err != nil { t.Fatalf("Expected no error, got %v", err) } if arr != nil { t.Errorf("Expected nil, got %+v", arr) } }
explode_data.jsonl/5313
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 104 }
[ 2830, 3393, 7153, 64, 1857, 26570, 19064, 1155, 353, 8840, 836, 8, 341, 36511, 1669, 10906, 64, 1857, 2979, 17, 2137, 314, 21, 2137, 314, 15, 11, 220, 15, 11248, 9859, 1669, 2890, 54874, 27907, 692, 743, 1848, 961, 2092, 341, 197, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestGetMissingRole(t *testing.T) { auth := NewAuthenticator(gTestBucket, nil) role, err := auth.GetRole("noSuchRole") assert.Equals(t, err, nil) assert.True(t, role == nil) }
explode_data.jsonl/31557
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 70 }
[ 2830, 3393, 1949, 25080, 9030, 1155, 353, 8840, 836, 8, 341, 78011, 1669, 1532, 5087, 61393, 3268, 2271, 36018, 11, 2092, 340, 197, 5778, 11, 1848, 1669, 4166, 2234, 9030, 445, 2152, 20706, 9030, 1138, 6948, 16207, 1155, 11, 1848, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestGetDevice(t *testing.T) { /*cfg := new(mock_test.ConfigurationRepoMock) cfg.On("GetStr", config.ConfigDirectory, config.DefConfigDirectory).Return(devicesConfigDir) cfg.On("GetStr", config.DeviceConfig, "").Return(devicesFile) cfg.On("GetStr", config.ScriptsTemplatesDirectory, config.DefScriptsTemplatesDirectory).Return(config.DefScriptsTemplatesDirectory) cfg.On("GetStr", config.ScriptsTemplatesDevicesDirectory, config.DefScriptsTemplatesDevicesDirectory).Return(config.DefScriptsTemplatesDevicesDirectory) cfg.On("GetStr", config.ScriptsDirectory, config.DefScriptsDirectory).Return(config.DefScriptsDirectory) cfg.On("GetInt", config.DeviceTimeout, config.DefDeviceTimeout).Return(config.DefDeviceTimeout) repo := NewDeviceRepo(cfg) devices, err := repo.GetDevices() assert.Nil(t, err) assert.True(t, len(devices.Devices) > 0) device, err2 := repo.GetDevice("licht-arbeitszimmer") assert.Nil(t, err2) assert.Equal(t, device.DeviceKey, "licht-arbeitszimmer") assert.Equal(t, device.Type, "light") assert.Equal(t, device.Caption, "Arbeitszimmer") assert.Equal(t, device.Optimistic, true) assert.Equal(t, device.Room, "Licht") assert.Equal(t, device.Template, "shelly_1") assert.Equal(t, device.Mqtt.DeviceID, "shelly1-xxxxxx") assert.Equal(t, device.Rest.Url, "http://192.168.9.52") device, err2 = repo.GetDevice("unknown") assert.Errorf(t, err2, "cant find device with key unknown") assert.Nil(t, device) cfg.AssertExpectations(t) */ }
explode_data.jsonl/58529
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 531 }
[ 2830, 3393, 1949, 6985, 1155, 353, 8840, 836, 8, 341, 197, 1057, 14072, 1669, 501, 30389, 4452, 17334, 25243, 11571, 340, 50286, 8071, 445, 1949, 2580, 497, 2193, 10753, 9310, 11, 2193, 49947, 2648, 9310, 568, 5598, 17622, 1216, 2648, 6...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTemplates(t *testing.T) { if true { // Temporarily disable test so TravisCI reports build success instead of test failure. // NOTE: need travis to set sparkpost base urls etc, or mock http request return } cfgMap, err := test.LoadConfig() if err != nil { t.Error(err) return } cfg, err := sp.NewConfig(cfgMap) if err != nil { t.Error(err) return } var client sp.Client err = client.Init(cfg) if err != nil { t.Error(err) return } tlist, _, err := client.Templates() if err != nil { t.Error(err) return } t.Logf("templates listed: %+v", tlist) content := sp.Content{ Subject: "this is a test template", // NB: deliberate syntax error //Text: "text part of the test template {{a}", Text: "text part of the test template", From: map[string]string{ "name": "test name", "email": "test@email.com", }, } template := &sp.Template{Content: content, Name: "test template"} id, _, err := client.TemplateCreate(template) if err != nil { t.Error(err) return } fmt.Printf("Created Template with id=%s\n", id) d := map[string]interface{}{} res, err := client.TemplatePreview(id, &sp.PreviewOptions{d}) if err != nil { t.Error(err) return } fmt.Printf("Preview Template with id=%s and response %+v\n", id, res) _, err = client.TemplateDelete(id) if err != nil { t.Error(err) return } fmt.Printf("Deleted Template with id=%s\n", id) }
explode_data.jsonl/30293
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 555 }
[ 2830, 3393, 51195, 1155, 353, 8840, 836, 8, 341, 743, 830, 341, 197, 197, 322, 19944, 96154, 11156, 1273, 773, 40710, 11237, 6682, 1936, 2393, 4518, 315, 1273, 7901, 624, 197, 197, 322, 16743, 25, 1184, 10137, 285, 311, 738, 15186, 22...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestSyncProducer(t *testing.T) { mt := mocktracer.Start() defer mt.Stop() seedBroker := sarama.NewMockBroker(t, 1) defer seedBroker.Close() leader := sarama.NewMockBroker(t, 2) defer leader.Close() metadataResponse := new(sarama.MetadataResponse) metadataResponse.AddBroker(leader.Addr(), leader.BrokerID()) metadataResponse.AddTopicPartition("my_topic", 0, leader.BrokerID(), nil, nil, nil, sarama.ErrNoError) seedBroker.Returns(metadataResponse) prodSuccess := new(sarama.ProduceResponse) prodSuccess.AddTopicPartition("my_topic", 0, sarama.ErrNoError) leader.Returns(prodSuccess) cfg := sarama.NewConfig() cfg.Version = sarama.MinVersion cfg.Producer.Return.Successes = true producer, err := sarama.NewSyncProducer([]string{seedBroker.Addr()}, cfg) if err != nil { t.Fatal(err) } producer = WrapSyncProducer(cfg, producer) msg1 := &sarama.ProducerMessage{ Topic: "my_topic", Value: sarama.StringEncoder("test 1"), Metadata: "test", } producer.SendMessage(msg1) spans := mt.FinishedSpans() assert.Len(t, spans, 1) { s := spans[0] assert.Equal(t, "kafka", s.Tag(ext.ServiceName)) assert.Equal(t, "queue", s.Tag(ext.SpanType)) assert.Equal(t, "Produce Topic my_topic", s.Tag(ext.ResourceName)) assert.Equal(t, "kafka.produce", s.OperationName()) assert.Equal(t, int32(0), s.Tag("partition")) assert.Equal(t, int64(0), s.Tag("offset")) } }
explode_data.jsonl/28004
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 564 }
[ 2830, 3393, 12154, 45008, 1155, 353, 8840, 836, 8, 341, 2109, 83, 1669, 7860, 94941, 12101, 741, 16867, 11965, 30213, 2822, 197, 22602, 65545, 1669, 274, 637, 64, 7121, 11571, 65545, 1155, 11, 220, 16, 340, 16867, 10320, 65545, 10421, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestMaxAllowedPacket(t *testing.T) { // Test cases from issue 31422: https://github.com/pingcap/tidb/issues/31422 // The string "SELECT length('') as len;" has 25 chars, // so if the string inside '' has a length of 999, the total query reaches the max allowed packet size. const maxAllowedPacket = 1024 var ( inBuffer bytes.Buffer readBytes []byte ) // The length of total payload is (25 + 999 = 1024). bytes := append([]byte{0x00, 0x04, 0x00, 0x00}, []byte(fmt.Sprintf("SELECT length('%s') as len;", strings.Repeat("a", 999)))...) _, err := inBuffer.Write(bytes) require.NoError(t, err) brc := newBufferedReadConn(&bytesConn{inBuffer}) pkt := newPacketIO(brc) pkt.setMaxAllowedPacket(maxAllowedPacket) readBytes, err = pkt.readPacket() require.NoError(t, err) require.Equal(t, fmt.Sprintf("SELECT length('%s') as len;", strings.Repeat("a", 999)), string(readBytes)) require.Equal(t, uint8(1), pkt.sequence) // The length of total payload is (25 + 1000 = 1025). inBuffer.Reset() bytes = append([]byte{0x01, 0x04, 0x00, 0x00}, []byte(fmt.Sprintf("SELECT length('%s') as len;", strings.Repeat("a", 1000)))...) _, err = inBuffer.Write(bytes) require.NoError(t, err) brc = newBufferedReadConn(&bytesConn{inBuffer}) pkt = newPacketIO(brc) pkt.setMaxAllowedPacket(maxAllowedPacket) _, err = pkt.readPacket() require.Error(t, err) // The length of total payload is (25 + 488 = 513). // Two separate packets would NOT exceed the limitation of maxAllowedPacket. inBuffer.Reset() bytes = append([]byte{0x01, 0x02, 0x00, 0x00}, []byte(fmt.Sprintf("SELECT length('%s') as len;", strings.Repeat("a", 488)))...) 
_, err = inBuffer.Write(bytes) require.NoError(t, err) brc = newBufferedReadConn(&bytesConn{inBuffer}) pkt = newPacketIO(brc) pkt.setMaxAllowedPacket(maxAllowedPacket) readBytes, err = pkt.readPacket() require.NoError(t, err) require.Equal(t, fmt.Sprintf("SELECT length('%s') as len;", strings.Repeat("a", 488)), string(readBytes)) require.Equal(t, uint8(1), pkt.sequence) inBuffer.Reset() bytes = append([]byte{0x01, 0x02, 0x00, 0x01}, []byte(fmt.Sprintf("SELECT length('%s') as len;", strings.Repeat("b", 488)))...) _, err = inBuffer.Write(bytes) require.NoError(t, err) brc = newBufferedReadConn(&bytesConn{inBuffer}) pkt.setBufferedReadConn(brc) readBytes, err = pkt.readPacket() require.NoError(t, err) require.Equal(t, fmt.Sprintf("SELECT length('%s') as len;", strings.Repeat("b", 488)), string(readBytes)) require.Equal(t, uint8(2), pkt.sequence) }
explode_data.jsonl/73160
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 966 }
[ 2830, 3393, 5974, 35382, 16679, 1155, 353, 8840, 836, 8, 341, 197, 322, 3393, 5048, 504, 4265, 220, 18, 16, 19, 17, 17, 25, 3703, 1110, 5204, 905, 4322, 287, 11346, 5523, 307, 65, 38745, 14, 18, 16, 19, 17, 17, 198, 197, 322, 57...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestServeHTTPSpecVersion(t *testing.T) { tests := []struct { specVersion string expectedStatus int }{ { // Missing specVersion -> HTTP 400 specVersion: "", expectedStatus: http.StatusBadRequest, }, { // Valid request specVersion: "0.3", expectedStatus: http.StatusNoContent, }, { // Future specVersion -> HTTP 400 specVersion: "999999.99", expectedStatus: http.StatusBadRequest, }, } for _, test := range tests { t.Run(test.specVersion, func(t *testing.T) { transport := &cehttp.Transport{} req := httptest.NewRequest("GET", "/", nil) req.Header.Set("ce-specversion", test.specVersion) w := httptest.NewRecorder() transport.ServeHTTP(w, req) actualStatus := w.Result().StatusCode if actualStatus != test.expectedStatus { t.Errorf("actual status (%d) != expected status (%d)", actualStatus, test.expectedStatus) t.Errorf("response body: %s", w.Body.String()) } }) } }
explode_data.jsonl/55108
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 385 }
[ 2830, 3393, 60421, 9230, 8327, 5637, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 98100, 5637, 262, 914, 198, 197, 42400, 2522, 526, 198, 197, 59403, 197, 197, 515, 298, 197, 322, 35264, 1398, 5637, 1464, 10130, 22...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestComboButton_ContentVisible_Click(t *testing.T) { is := is.New(t) b := newComboButton(t) leftMouseButtonClick(b, t) is.True(b.ContentVisible) leftMouseButtonClick(b, t) is.True(!b.ContentVisible) }
explode_data.jsonl/72365
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 89 }
[ 2830, 3393, 56893, 1567, 78383, 5715, 7163, 1155, 353, 8840, 836, 8, 341, 19907, 1669, 374, 7121, 1155, 692, 2233, 1669, 501, 56893, 1567, 1155, 692, 35257, 76544, 2612, 1883, 11, 259, 340, 19907, 32443, 1883, 12614, 5715, 692, 35257, 7...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestLuaScript(t *testing.T) { e := getTestExporter() for _, tst := range []struct { Script string ExpectedKeys int ExpectedError bool }{ { Script: `return {"a", "11", "b", "12", "c", "13"}`, ExpectedKeys: 3, }, { Script: `return {"key1", "6389"}`, ExpectedKeys: 1, }, { Script: `return {} `, ExpectedKeys: 0, }, { Script: `return {"key1" BROKEN `, ExpectedKeys: 0, ExpectedError: true, }, } { e.options.LuaScript = []byte(tst.Script) nKeys := tst.ExpectedKeys setupDBKeys(t, os.Getenv("TEST_REDIS_URI")) defer deleteKeysFromDB(t, os.Getenv("TEST_REDIS_URI")) chM := make(chan prometheus.Metric) go func() { e.Collect(chM) close(chM) }() scrapeErrorFound := false for m := range chM { if strings.Contains(m.Desc().String(), "test_script_value") { nKeys-- } if strings.Contains(m.Desc().String(), "exporter_last_scrape_error") { g := &dto.Metric{} m.Write(g) if g.GetGauge() != nil && *g.GetGauge().Value > 0 { scrapeErrorFound = true } } } if nKeys != 0 { t.Error("didn't find expected script keys") } if tst.ExpectedError { if !scrapeErrorFound { t.Error("didn't find expected scrape errors") } } } }
explode_data.jsonl/46992
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 618 }
[ 2830, 3393, 58020, 5910, 1155, 353, 8840, 836, 8, 341, 7727, 1669, 633, 2271, 88025, 2822, 2023, 8358, 71707, 1669, 2088, 3056, 1235, 341, 197, 197, 5910, 286, 914, 198, 197, 197, 18896, 8850, 220, 526, 198, 197, 197, 18896, 1454, 180...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestKubeServiceGetAll(t *testing.T) { testCases := []struct { data [][]byte err error }{ { data: [][]byte{[]byte(`{"id":"kube-name-1234"}`), []byte(`{"id":"56kube-name-5678"}`)}, err: nil, }, { data: nil, err: errors.New("test err"), }, } prefix := DefaultStoragePrefix for _, testCase := range testCases { m := new(testutils.MockStorage) m.On("GetAll", context.Background(), prefix).Return(testCase.data, testCase.err) service := NewService(prefix, m, nil) kubes, err := service.ListAll(context.Background()) if testCase.err != errors.Cause(err) { t.Errorf("Wrong error expected %v actual %v", testCase.err, err) return } if testCase.err == nil && len(kubes) != 2 { t.Errorf("Wrong len of kubes expected 2 actual %d", len(kubes)) } } }
explode_data.jsonl/1996
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 337 }
[ 2830, 3393, 42, 3760, 1860, 1949, 2403, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 8924, 52931, 3782, 198, 197, 9859, 220, 1465, 198, 197, 59403, 197, 197, 515, 298, 8924, 25, 52931, 3782, 90, 1294, 3782, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5