text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func Test_SequenceTimeoutDelayedTask(t *testing.T) { projectName := "sequence-timeout-delay" serviceName := "my-service" sequenceStateShipyardFilePath, err := CreateTmpShipyardFile(sequenceTimeoutWithTriggeredAfterShipyard) require.Nil(t, err) defer os.Remove(sequenceStateShipyardFilePath) t.Logf("creating project %s", projectName) err = CreateProject(projectName, sequenceStateShipyardFilePath, true) require.Nil(t, err) t.Logf("creating service %s", serviceName) output, err := ExecuteCommand(fmt.Sprintf("keptn create service %s --project=%s", serviceName, projectName)) require.Nil(t, err) require.Contains(t, output, "created successfully") err = setShipyardControllerTaskTimeout(t, "10s") defer func() { _ = setShipyardControllerTaskTimeout(t, "20m") }() require.Nil(t, err) // trigger the task sequence t.Log("starting task sequence") keptnContextID, err := TriggerSequence(projectName, serviceName, "dev", "delivery", nil) require.Nil(t, err) // wait a minute and make verify that the sequence has not been timed out <-time.After(30 * time.Second) // also, the unknown.triggered event should not have been sent yet triggeredEvent, err := GetLatestEventOfType(keptnContextID, projectName, "dev", keptnv2.GetTriggeredEventType("unknown")) require.Nil(t, err) require.Nil(t, triggeredEvent) states, _, err := GetState(projectName) require.Nil(t, err) require.Len(t, states.States, 1) state := states.States[0] require.Equal(t, scmodels.SequenceStartedState, state.State) // after some time, the unknown.triggered event should be available require.Eventually(t, func() bool { triggeredEvent, err := GetLatestEventOfType(keptnContextID, projectName, "dev", keptnv2.GetTriggeredEventType("unknown")) if err != nil { return false } if triggeredEvent == nil { return false } return true }, 65*time.Second, 5*time.Second) }
explode_data.jsonl/59554
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 639 }
[ 2830, 3393, 26920, 4375, 7636, 57361, 6262, 1155, 353, 8840, 836, 8, 341, 72470, 675, 1669, 330, 15512, 7246, 411, 45171, 698, 52934, 675, 1669, 330, 2408, 23461, 698, 197, 15512, 1397, 29624, 17428, 19090, 11, 1848, 1669, 4230, 35986, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPluginDhcp1(t *testing.T) { a := &DhcpTestBase{ testname: "dhcp1", dropAll: false, monitor: false, match: 0, capture: true, duration: 120 * time.Second, clientsToSim: 1, } a.Run(t) }
explode_data.jsonl/80096
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 131 }
[ 2830, 3393, 11546, 35, 62169, 16, 1155, 353, 8840, 836, 8, 341, 11323, 1669, 609, 35, 62169, 2271, 3978, 515, 197, 18185, 606, 25, 257, 330, 96220, 16, 756, 197, 2698, 887, 2403, 25, 414, 895, 345, 197, 197, 32225, 25, 414, 895, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestValidateBridgeType(t *testing.T) { t.Parallel() store, cleanup := cltest.NewStore(t) defer cleanup() tests := []struct { description string request models.BridgeTypeRequest want error }{ { "no adapter name", models.BridgeTypeRequest{ URL: cltest.WebURL(t, "https://denergy.eth"), }, models.NewJSONAPIErrorsWith("No name specified"), }, { "invalid adapter name", models.BridgeTypeRequest{ Name: "invalid/adapter", URL: cltest.WebURL(t, "https://denergy.eth"), }, models.NewJSONAPIErrorsWith("task type validation: name invalid/adapter contains invalid characters"), }, { "invalid with blank url", models.BridgeTypeRequest{ Name: "validadaptername", URL: cltest.WebURL(t, ""), }, models.NewJSONAPIErrorsWith("URL must be present"), }, { "valid url", models.BridgeTypeRequest{ Name: "adapterwithvalidurl", URL: cltest.WebURL(t, "//denergy"), }, nil, }, { "valid docker url", models.BridgeTypeRequest{ Name: "adapterwithdockerurl", URL: cltest.WebURL(t, "http://chainlink_cmc-adapter_1:8080"), }, nil, }, { "valid MinimumContractPayment positive", models.BridgeTypeRequest{ Name: "adapterwithdockerurl", URL: cltest.WebURL(t, "http://chainlink_cmc-adapter_1:8080"), MinimumContractPayment: assets.NewLink(1), }, nil, }, { "invalid MinimumContractPayment negative", models.BridgeTypeRequest{ Name: "adapterwithdockerurl", URL: cltest.WebURL(t, "http://chainlink_cmc-adapter_1:8080"), MinimumContractPayment: assets.NewLink(-1), }, models.NewJSONAPIErrorsWith("MinimumContractPayment must be positive"), }, { "existing core adapter", models.BridgeTypeRequest{ Name: "ethtx", URL: cltest.WebURL(t, "https://denergy.eth"), }, models.NewJSONAPIErrorsWith("Bridge Type ethtx is a native adapter"), }, { "new external adapter", models.BridgeTypeRequest{ Name: "gdaxprice", URL: cltest.WebURL(t, "https://denergy.eth"), }, nil, }} for _, test := range tests { t.Run(test.description, func(t *testing.T) { result := services.ValidateBridgeType(&test.request, store) assert.Equal(t, test.want, result) }) 
} }
explode_data.jsonl/75329
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1027 }
[ 2830, 3393, 17926, 32848, 929, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 57279, 11, 21290, 1669, 1185, 1944, 7121, 6093, 1155, 340, 16867, 21290, 2822, 78216, 1669, 3056, 1235, 341, 197, 42407, 914, 198, 197, 23555, 257, 41...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNetworksPruneError(t *testing.T) { client := &Client{ client: newMockClient(errorMock(http.StatusInternalServerError, "Server error")), version: "1.25", } filters := filters.NewArgs() _, err := client.NetworksPrune(context.Background(), filters) if err == nil || err.Error() != "Error response from daemon: Server error" { t.Fatalf("expected a Server Error, got %v", err) } if !errdefs.IsSystem(err) { t.Fatalf("expected a Server Error, got %T", err) } }
explode_data.jsonl/41334
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 173 }
[ 2830, 3393, 12320, 82, 3533, 2886, 1454, 1155, 353, 8840, 836, 8, 341, 25291, 1669, 609, 2959, 515, 197, 25291, 25, 220, 501, 11571, 2959, 6390, 11571, 19886, 66760, 11, 330, 5475, 1465, 30154, 197, 74954, 25, 330, 16, 13, 17, 20, 7...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestGetSCSIIdLun(t *testing.T) { tests := []struct { index int expectedScsiID int expectedLun int }{ {0, 0, 0}, {1, 0, 1}, {2, 0, 2}, {255, 0, 255}, {256, 1, 0}, {257, 1, 1}, {258, 1, 2}, {512, 2, 0}, {513, 2, 1}, } for _, test := range tests { scsiID, lun, err := GetSCSIIdLun(test.index) assert.Nil(t, err) if scsiID != test.expectedScsiID && lun != test.expectedLun { t.Fatalf("Expecting scsi-id:lun %d:%d, Got %d:%d", test.expectedScsiID, test.expectedLun, scsiID, lun) } } _, _, err := GetSCSIIdLun(maxSCSIDevices + 1) assert.NotNil(t, err) }
explode_data.jsonl/8213
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 314 }
[ 2830, 3393, 1949, 3540, 13817, 764, 43, 359, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 26327, 688, 526, 198, 197, 42400, 3326, 6321, 915, 526, 198, 197, 42400, 43, 359, 262, 526, 198, 197, 59403, 197, 197, 90,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestSchemaRegistryClient_JsonSchemaParses(t *testing.T) { { server, call := mockServerWithSchemaResponse(t, "test1-value", "latest", schemaResponse{ Subject: "test1", Version: 1, Schema: "{\"type\": \"object\",\n\"properties\": {\n \"f1\": {\n \"type\": \"string\"\n }}}", ID: 1, References: nil, }) srClient := CreateSchemaRegistryClient(server.URL) schema1, err := srClient.GetLatestSchema("test1-value") // Test valid schema response assert.NoError(t, err) assert.Equal(t, 1, *call) var v interface{} assert.NotNil(t, schema1.JsonSchema()) assert.NoError(t, json.Unmarshal([]byte("{\"f1\": \"v1\"}"), &v)) assert.NoError(t, schema1.JsonSchema().Validate(v)) } { server, call := mockServerWithSchemaResponse(t, "test1-value", "latest", schemaResponse{ Subject: "test1", Version: 1, Schema: "payload", ID: 1, References: nil, }) srClient := CreateSchemaRegistryClient(server.URL) schema1, err := srClient.GetLatestSchema("test1-value") // Test invalid schema response assert.NoError(t, err) assert.Equal(t, 1, *call) assert.Nil(t, schema1.JsonSchema()) } }
explode_data.jsonl/73768
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 509 }
[ 2830, 3393, 8632, 15603, 2959, 62, 5014, 8632, 47, 1561, 288, 1155, 353, 8840, 836, 8, 341, 197, 515, 197, 41057, 11, 1618, 1669, 7860, 5475, 2354, 8632, 2582, 1155, 11, 330, 1944, 16, 19083, 497, 330, 19350, 497, 10802, 2582, 515, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCloudProviderNodeShutdown(t *testing.T) { testCases := []struct { testName string node *v1.Node shutdown bool }{ { testName: "node shutdowned add taint", shutdown: true, node: &v1.Node{ ObjectMeta: metav1.ObjectMeta{ Name: "node0", CreationTimestamp: metav1.Date(2012, 1, 1, 0, 0, 0, 0, time.UTC), }, Spec: v1.NodeSpec{ ProviderID: "node0", }, Status: v1.NodeStatus{ Conditions: []v1.NodeCondition{ { Type: v1.NodeReady, Status: v1.ConditionUnknown, LastHeartbeatTime: metav1.Date(2015, 1, 1, 12, 0, 0, 0, time.UTC), LastTransitionTime: metav1.Date(2015, 1, 1, 12, 0, 0, 0, time.UTC), }, }, }, }, }, { testName: "node started after shutdown remove taint", shutdown: false, node: &v1.Node{ ObjectMeta: metav1.ObjectMeta{ Name: "node0", CreationTimestamp: metav1.Date(2012, 1, 1, 0, 0, 0, 0, time.UTC), }, Spec: v1.NodeSpec{ ProviderID: "node0", Taints: []v1.Taint{ { Key: algorithm.TaintNodeShutdown, Effect: v1.TaintEffectNoSchedule, }, }, }, Status: v1.NodeStatus{ Conditions: []v1.NodeCondition{ { Type: v1.NodeReady, Status: v1.ConditionTrue, LastHeartbeatTime: metav1.Date(2015, 1, 1, 12, 0, 0, 0, time.UTC), LastTransitionTime: metav1.Date(2015, 1, 1, 12, 0, 0, 0, time.UTC), }, }, }, }, }, } for _, tc := range testCases { t.Run(tc.testName, func(t *testing.T) { fnh := &testutil.FakeNodeHandler{ Existing: []*v1.Node{tc.node}, Clientset: fake.NewSimpleClientset(), } nodeController, _ := newNodeLifecycleControllerFromClient( nil, fnh, 10*time.Minute, testRateLimiterQPS, testRateLimiterQPS, testLargeClusterThreshold, testUnhealthyThreshold, testNodeMonitorGracePeriod, testNodeStartupGracePeriod, testNodeMonitorPeriod, false) nodeController.cloud = &fakecloud.FakeCloud{} nodeController.now = func() metav1.Time { return metav1.Date(2016, 1, 1, 12, 0, 0, 0, time.UTC) } nodeController.recorder = testutil.NewFakeRecorder() nodeController.nodeShutdownInCloudProvider = func(ctx context.Context, node *v1.Node) (bool, error) { return tc.shutdown, nil } if err := 
nodeController.syncNodeStore(fnh); err != nil { t.Errorf("unexpected error: %v", err) } if err := nodeController.monitorNodeHealth(); err != nil { t.Errorf("unexpected error: %v", err) } if len(fnh.UpdatedNodes) != 1 { t.Errorf("Node was not updated") } if tc.shutdown { if len(fnh.UpdatedNodes[0].Spec.Taints) != 1 { t.Errorf("Node Taint was not added") } if fnh.UpdatedNodes[0].Spec.Taints[0].Key != "node.cloudprovider.kubernetes.io/shutdown" { t.Errorf("Node Taint key is not correct") } } else { if len(fnh.UpdatedNodes[0].Spec.Taints) != 0 { t.Errorf("Node Taint was not removed after node is back in ready state") } } }) } }
explode_data.jsonl/9612
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1526 }
[ 2830, 3393, 16055, 5179, 1955, 62004, 1155, 353, 8840, 836, 8, 1476, 18185, 37302, 1669, 3056, 1235, 341, 197, 18185, 675, 914, 198, 197, 20831, 257, 353, 85, 16, 21714, 198, 197, 36196, 18452, 1807, 198, 197, 59403, 197, 197, 515, 29...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSerialize(t *testing.T) { t.Log(`Should serialize todos correctly`) mockNow := time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC) todo := Todo{ ID: 1, Title: MakeNullString("example title"), Note: MakeNullString("example note"), CreatedAt: mockNow, ModifiedAt: mockNow, DueAt: pq.NullTime{Time: mockNow, Valid: false}, UserID: 22, CompletedAt: pq.NullTime{Time: mockNow, Valid: false}, IsDone: false, } todoTests := make([]TodoTest, 4) todoTests[0].Todo = todo todoTests[0].Expected = `{"createdAt":"2009-11-10T23:00:00Z","id":1,"isDone":false,"modifiedAt":"2009-11-10T23:00:00Z","note":"example note","title":"example title"}` todo.DueAt.Valid = true todoTests[1].Todo = todo todoTests[1].Expected = `{"createdAt":"2009-11-10T23:00:00Z","dueAt":"2009-11-10T23:00:00Z","id":1,"isDone":false,"modifiedAt":"2009-11-10T23:00:00Z","note":"example note","title":"example title"}` todo.DueAt.Valid = false todo.CompletedAt.Valid = true todo.IsDone = true todoTests[2].Todo = todo todoTests[2].Expected = `{"completedAt":"2009-11-10T23:00:00Z","createdAt":"2009-11-10T23:00:00Z","id":1,"isDone":true,"modifiedAt":"2009-11-10T23:00:00Z","note":"example note","title":"example title"}` todo.CompletedAt.Valid = false todo.IsDone = false todo.Title.Valid = false todoTests[3].Todo = todo todoTests[3].Expected = `{"createdAt":"2009-11-10T23:00:00Z","id":1,"isDone":false,"modifiedAt":"2009-11-10T23:00:00Z","note":"example note"}` todo.Title.Valid = true for _, test := range todoTests { serialTodo := test.Todo.Serialize() data, err := json.Marshal(serialTodo) if err != nil { t.Error(err) t.Fail() } received := string(data) if strings.Compare(test.Expected, received) != 0 { fmt.Println(" === EXPECTED ===\n", test.Expected) fmt.Println(" === RECEIVED ===\n", received) t.Fail() } } }
explode_data.jsonl/44573
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 826 }
[ 2830, 3393, 15680, 1155, 353, 8840, 836, 8, 341, 3244, 5247, 5809, 14996, 24235, 15519, 12440, 24183, 77333, 7039, 1669, 882, 8518, 7, 17, 15, 15, 24, 11, 882, 2067, 859, 1377, 11, 220, 16, 15, 11, 220, 17, 18, 11, 220, 15, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestGooseFSEngine_calculateMountPointsChanges(t *testing.T) { testCases := map[string]struct { mounted []string current []string expect map[string][]string }{ "calculate mount point changes test case 1": { mounted: []string{"hadoop3.3.0"}, current: []string{"hadoopcurrent", "hadoop3.3.0"}, expect: map[string][]string{"added": {"hadoopcurrent"}, "removed": {}}, }, "calculate mount point changes test case 2": { mounted: []string{"hadoopcurrent", "hadoop3.3.0"}, current: []string{"hadoop3.3.0"}, expect: map[string][]string{"added": {}, "removed": {"hadoopcurrent"}}, }, "calculate mount point changes test case 3": { mounted: []string{"hadoopcurrent", "hadoop3.2.2"}, current: []string{"hadoop3.3.0", "hadoop3.2.2"}, expect: map[string][]string{"added": {"hadoop3.3.0"}, "removed": {"hadoopcurrent"}}, }, "calculate mount point changes test case 4": { mounted: []string{"hadoop3.3.0"}, current: []string{"hadoop3.3.0"}, expect: map[string][]string{"added": {}, "removed": {}}, }, "calculate mount point changes test case 5": { mounted: []string{"hadoopcurrent", "hadoop3.2.2"}, current: []string{"hadoop3.3.0", "hadoop3.2.2", "hadoop3.3.1"}, expect: map[string][]string{"added": {"hadoop3.3.0", "hadoop3.3.1"}, "removed": {"hadoopcurrent"}}, }, } for _, item := range testCases { engine := &GooseFSEngine{} added, removed := engine.calculateMountPointsChanges(item.mounted, item.current) if !ArrayEqual(added, item.expect["added"]) { t.Errorf("expected added %v, got %v", item.expect["added"], added) } if !ArrayEqual(removed, item.expect["removed"]) { t.Errorf("expected removed %v, got %v", item.expect["removed"], removed) } } }
explode_data.jsonl/70293
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 725 }
[ 2830, 3393, 10850, 960, 37, 925, 70891, 24005, 11207, 16284, 11411, 11317, 1155, 353, 8840, 836, 8, 1476, 18185, 37302, 1669, 2415, 14032, 60, 1235, 341, 197, 2109, 629, 291, 3056, 917, 198, 197, 20121, 3056, 917, 198, 197, 24952, 220, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestWaitForCompletionFailed(t *testing.T) { size := 2 done := make(chan bool, size) totalWaitTime = time.Second assert.False(t, waitForCompletion(done)) totalWaitTime = 60 * time.Second }
explode_data.jsonl/62013
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 69 }
[ 2830, 3393, 14190, 2461, 33190, 9408, 1155, 353, 8840, 836, 8, 341, 13832, 1669, 220, 17, 198, 40495, 1669, 1281, 35190, 1807, 11, 1379, 340, 34493, 14190, 1462, 284, 882, 32435, 198, 6948, 50757, 1155, 11, 52223, 33190, 34232, 1171, 34...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestBuildTrafficConfiguration_MissingConfig(t *testing.T) { expected := &Config{ Targets: map[string]RevisionTargets{}, Configurations: map[string]*v1.Configuration{ goodConfig.Name: goodConfig, }, Revisions: map[string]*v1.Revision{ goodOldRev.Name: goodOldRev, goodNewRev.Name: goodNewRev, }, MissingTargets: []corev1.ObjectReference{{ APIVersion: "serving.knative.dev/v1", Kind: "Configuration", Name: missingConfig.Name, Namespace: missingConfig.Namespace, }}, } expectedErr := errMissingConfiguration(missingConfig.Name) r := testRouteWithTrafficTargets(WithSpecTraffic(v1.TrafficTarget{ RevisionName: goodOldRev.Name, Percent: ptr.Int64(100), }, v1.TrafficTarget{ Tag: "beta", RevisionName: goodNewRev.Name, }, v1.TrafficTarget{ Tag: "alpha", ConfigurationName: missingConfig.Name, })) if tc, err := BuildTrafficConfiguration(configLister, revLister, r); err != nil && expectedErr.Error() != err.Error() { t.Errorf("Expected %v, saw %v", expectedErr, err) } else if got, want := tc, expected; !cmp.Equal(want, got, cmpOpts...) { t.Errorf("Unexpected traffic diff (-want +got): %v", cmp.Diff(want, got, cmpOpts...)) } }
explode_data.jsonl/17615
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 499 }
[ 2830, 3393, 11066, 87229, 7688, 1245, 13577, 2648, 1155, 353, 8840, 836, 8, 341, 42400, 1669, 609, 2648, 515, 197, 197, 49030, 25, 2415, 14032, 60, 33602, 49030, 38837, 197, 66156, 21449, 25, 2415, 14032, 8465, 85, 16, 17334, 515, 298, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestValidateWorks(t *testing.T) { if isValidEndpoint(&hostPortPair{}) { t.Errorf("Didn't fail for empty set") } if isValidEndpoint(&hostPortPair{host: "foobar"}) { t.Errorf("Didn't fail with invalid port") } if isValidEndpoint(&hostPortPair{host: "foobar", port: -1}) { t.Errorf("Didn't fail with a negative port") } if !isValidEndpoint(&hostPortPair{host: "foobar", port: 8080}) { t.Errorf("Failed a valid config.") } }
explode_data.jsonl/66171
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 177 }
[ 2830, 3393, 17926, 37683, 1155, 353, 8840, 836, 8, 341, 743, 26171, 27380, 2099, 3790, 7084, 12443, 28875, 341, 197, 3244, 13080, 445, 86519, 944, 3690, 369, 4287, 738, 1138, 197, 532, 743, 26171, 27380, 2099, 3790, 7084, 12443, 90, 379...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestDevConfigChange(t *testing.T) { testutil.Run(t, "test config change", func(t *testutil.T) { mockRunner := &mockConfigChangeRunner{} t.Override(&createRunner, func(config.SkaffoldOptions) (runner.Runner, *latest.SkaffoldConfig, error) { return mockRunner, &latest.SkaffoldConfig{}, nil }) t.Override(&opts, config.SkaffoldOptions{ Cleanup: true, NoPrune: false, }) err := doDev(context.Background(), ioutil.Discard) // ensure that we received the context.Canceled error (and not ErrorConfigurationChanged) // also ensure that the we run through dev cycles (since we reloaded on the first), // and exit after a real error is received t.CheckTrue(err == context.Canceled) t.CheckDeepEqual(mockRunner.cycles, 2) }) }
explode_data.jsonl/2040
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 263 }
[ 2830, 3393, 14592, 2648, 4072, 1155, 353, 8840, 836, 8, 341, 18185, 1314, 16708, 1155, 11, 330, 1944, 2193, 2297, 497, 2915, 1155, 353, 1944, 1314, 836, 8, 341, 197, 77333, 19486, 1669, 609, 16712, 2648, 4072, 19486, 31483, 197, 3244, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCopyFileBackupDir(t *testing.T) { ctx := context.Background() ci := fs.GetConfig(ctx) r := fstest.NewRun(t) defer r.Finalise() if !operations.CanServerSideMove(r.Fremote) { t.Skip("Skipping test as remote does not support server-side move or copy") } oldBackupDir := ci.BackupDir ci.BackupDir = r.FremoteName + "/backup" defer func() { ci.BackupDir = oldBackupDir }() file1 := r.WriteFile("dst/file1", "file1 contents", t1) fstest.CheckItems(t, r.Flocal, file1) file1old := r.WriteObject(ctx, "dst/file1", "file1 contents old", t1) fstest.CheckItems(t, r.Fremote, file1old) err := operations.CopyFile(ctx, r.Fremote, r.Flocal, file1.Path, file1.Path) require.NoError(t, err) fstest.CheckItems(t, r.Flocal, file1) file1old.Path = "backup/dst/file1" fstest.CheckItems(t, r.Fremote, file1old, file1) }
explode_data.jsonl/51943
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 351 }
[ 2830, 3393, 12106, 1703, 56245, 6184, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 741, 1444, 72, 1669, 8619, 2234, 2648, 7502, 340, 7000, 1669, 48434, 477, 7121, 6727, 1155, 340, 16867, 435, 991, 977, 1064, 741, 743, 753, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGravatar(t *testing.T) { Convey("Gravatar", t, func() { url := Gravatar("fuxiaohei@vip.qq.com", 50) targetURL := "https://www.gravatar.com/avatar/f72f7454ce9d710baa506394f68f4132?size=50" So(url, ShouldEqual, targetURL) url2 := Gravatar("fuxiaohei@vip.qq.com", 0) targetURL = "https://www.gravatar.com/avatar/f72f7454ce9d710baa506394f68f4132?size=80" So(url2, ShouldEqual, targetURL) }) }
explode_data.jsonl/17752
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 192 }
[ 2830, 3393, 6464, 11962, 1155, 353, 8840, 836, 8, 341, 93070, 5617, 445, 6464, 11962, 497, 259, 11, 2915, 368, 341, 197, 19320, 1669, 2825, 11962, 445, 69, 2200, 22516, 60852, 31, 64197, 60239, 905, 497, 220, 20, 15, 340, 197, 28861, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPaymentControlSubscribeFail(t *testing.T) { t.Parallel() t.Run("register attempt", func(t *testing.T) { testPaymentControlSubscribeFail(t, true) }) t.Run("no register attempt", func(t *testing.T) { testPaymentControlSubscribeFail(t, false) }) }
explode_data.jsonl/37952
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 95 }
[ 2830, 3393, 20188, 3273, 28573, 19524, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 3244, 16708, 445, 6343, 4774, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 18185, 20188, 3273, 28573, 19524, 1155, 11, 830, 340, 197, 3518, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLookupValue(t *testing.T) { values := map[string]interface{}{ "foo": map[string]interface{}{ "bar": "baz", }, "very": map[string]interface{}{ "very": map[string]interface{}{ "very": map[string]interface{}{ "very": "deep", }, }, }, "a": "b", } for key, expectedResult := range map[string]interface{}{ "foo": nil, "foo.bar": "baz", "very": nil, "very.very": nil, "very.very.very": nil, "very.very.very.very": "deep", "very.very.very.very.deep": nil, "unknown": nil, "a": "b", } { result := LookupValueByString(key, values) if expectedResult == nil { assert.Nil(t, result) } else { assert.Equal(t, expectedResult, *result.(*string)) } } }
explode_data.jsonl/3663
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 451 }
[ 2830, 3393, 34247, 1130, 1155, 353, 8840, 836, 8, 341, 45939, 1669, 2415, 14032, 31344, 67066, 197, 197, 1, 7975, 788, 2415, 14032, 31344, 67066, 298, 197, 1, 2257, 788, 330, 42573, 756, 197, 197, 1583, 197, 197, 1, 1204, 788, 2415, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestScopeResolutionOperator(t *testing.T) { testStr := `<? MyClass::myfunc($var); echo MyClass::myconst; echo $var::myfunc();` p := NewParser() p.disableScoping = true a, _ := p.Parse("test.php", testStr) tree := []ast.Node{ ast.ExprStmt{ Expr: &ast.ClassExpr{ Receiver: &ast.Identifier{Value: "MyClass"}, Expr: &ast.FunctionCallExpr{ FunctionName: &ast.Identifier{Value: "myfunc"}, Arguments: []ast.Expr{ ast.NewVariable("var"), }, }, }, }, ast.Echo(&ast.ClassExpr{ Receiver: &ast.Identifier{Value: "MyClass"}, Expr: ast.ConstantExpr{ Variable: ast.NewVariable("myconst"), }, }), ast.Echo(&ast.ClassExpr{ Receiver: ast.NewVariable("var"), Expr: &ast.FunctionCallExpr{ FunctionName: &ast.Identifier{Value: "myfunc"}, Arguments: []ast.Expr{}, }, }), } if !assertEquals(a.Nodes[0], tree[0]) { t.Fatalf("Scope resolution operator function call did not correctly parse") } if !assertEquals(a.Nodes[1], tree[1]) { t.Fatalf("Scope resolution operator expression did not correctly parse") } if !assertEquals(a.Nodes[2], tree[2]) { t.Fatalf("Scope resolution operator function call on identifier did not correctly parse") } }
explode_data.jsonl/28450
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 518 }
[ 2830, 3393, 10803, 38106, 18461, 1155, 353, 8840, 836, 8, 341, 18185, 2580, 1669, 1565, 53075, 220, 83826, 486, 2408, 2830, 699, 947, 317, 220, 1687, 83826, 486, 2408, 1024, 280, 220, 1687, 400, 947, 486, 2408, 2830, 2129, 3989, 3223, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestAdmin_ReadOnly(t *testing.T) { ts, srv, teardown := startupT(t) defer teardown() c1 := store.Comment{Text: "test test #1", Locator: store.Locator{SiteID: "radio-t", URL: "https://radio-t.com/blah"}, User: store.User{Name: "user1 name", ID: "user1"}} c2 := store.Comment{Text: "test test #2", ParentID: "p1", Locator: store.Locator{SiteID: "radio-t", URL: "https://radio-t.com/blah"}, User: store.User{Name: "user2", ID: "user2"}} _, err := srv.DataService.Create(c1) assert.Nil(t, err) _, err = srv.DataService.Create(c2) assert.Nil(t, err) info, err := srv.DataService.Info(store.Locator{SiteID: "radio-t", URL: "https://radio-t.com/blah"}, 0) assert.Nil(t, err) assert.False(t, info.ReadOnly) // set post to read-only req, err := http.NewRequest(http.MethodPut, fmt.Sprintf("%s/api/v1/admin/readonly?site=radio-t&url=https://radio-t.com/blah&ro=1", ts.URL), nil) assert.Nil(t, err) resp, err := sendReq(t, req, "") // non-admin user require.NoError(t, err) assert.Equal(t, 401, resp.StatusCode) resp, err = sendReq(t, req, adminUmputunToken) require.NoError(t, err) assert.Equal(t, 200, resp.StatusCode) info, err = srv.DataService.Info(store.Locator{SiteID: "radio-t", URL: "https://radio-t.com/blah"}, 0) assert.Nil(t, err) assert.True(t, info.ReadOnly) // try to write comment c := store.Comment{Text: "test test #2", ParentID: "p1", Locator: store.Locator{SiteID: "radio-t", URL: "https://radio-t.com/blah"}} b, err := json.Marshal(c) assert.Nil(t, err, "can't marshal comment %+v", c) req, err = http.NewRequest("POST", ts.URL+"/api/v1/comment", bytes.NewBuffer(b)) require.NoError(t, err) resp, err = sendReq(t, req, adminUmputunToken) require.NoError(t, err) assert.Equal(t, http.StatusForbidden, resp.StatusCode) // reset post's read-only req, err = http.NewRequest(http.MethodPut, fmt.Sprintf("%s/api/v1/admin/readonly?site=radio-t&url=https://radio-t.com/blah&ro=0", ts.URL), nil) assert.Nil(t, err) resp, err = sendReq(t, req, adminUmputunToken) require.NoError(t, err) 
assert.Equal(t, 200, resp.StatusCode) info, err = srv.DataService.Info(store.Locator{SiteID: "radio-t", URL: "https://radio-t.com/blah"}, 0) assert.Nil(t, err) assert.False(t, info.ReadOnly) // try to write comment c = store.Comment{Text: "test test #2", ParentID: "p1", Locator: store.Locator{SiteID: "radio-t", URL: "https://radio-t.com/blah"}} b, err = json.Marshal(c) assert.Nil(t, err, "can't marshal comment %+v", c) req, err = http.NewRequest("POST", ts.URL+"/api/v1/comment", bytes.NewBuffer(b)) require.NoError(t, err) resp, err = sendReq(t, req, adminUmputunToken) require.NoError(t, err) assert.Equal(t, http.StatusCreated, resp.StatusCode) }
explode_data.jsonl/51828
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1148 }
[ 2830, 3393, 7210, 62, 20914, 1155, 353, 8840, 836, 8, 341, 57441, 11, 43578, 11, 49304, 1669, 20567, 51, 1155, 340, 16867, 49304, 2822, 1444, 16, 1669, 3553, 56730, 90, 1178, 25, 330, 1944, 1273, 671, 16, 497, 98653, 25, 3553, 1214, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestHandleCVRun covers cvPubSubHandlerImpl, the pub/sub entry point for
// CV run completion events: non-chromium runs and dry runs are ignored, a
// successful full run enqueues one ingestion task per tryjob, and a run
// containing an unrecognized tryjob result still processes the valid ones.
func TestHandleCVRun(t *testing.T) {
	t.Parallel()
	ctx := testutil.TestingContext()
	Convey(`Test CVRunPubSubHandler`, t, func() {
		Convey(`non chromium cv run is ignored`, func() {
			ctx, skdr := tq.TestingContext(ctx, nil)
			psRun := &cvv1.PubSubRun{
				Id:     "projects/fake/runs/run_id",
				Status: cvv1.Run_SUCCEEDED,
			}
			r := &http.Request{Body: makeCVRunReq(psRun)}
			processed, err := cvPubSubHandlerImpl(ctx, r)
			So(err, ShouldBeNil)
			So(processed, ShouldBeFalse)
			// no ingestion tasks scheduled for a non-chromium run
			So(len(skdr.Tasks().Payloads()), ShouldEqual, 0)
		})
		Convey(`chromium cv dry_run is ignored`, func() {
			ctx, skdr := tq.TestingContext(ctx, nil)
			rID := "id_dry_run"
			fID := fullRunID(rID)
			// DRY_RUN mode must not trigger ingestion
			runs := map[string]*cvv0.Run{
				fID: {
					Id:         fID,
					Mode:       "DRY_RUN",
					CreateTime: timestamppb.New(clock.Now(ctx)),
				},
			}
			ctx = cv.UseFakeClient(ctx, runs)
			r := &http.Request{Body: makeCVChromiumRunReq(fID)}
			processed, err := cvPubSubHandlerImpl(ctx, r)
			So(err, ShouldBeNil)
			So(processed, ShouldBeFalse)
			So(len(skdr.Tasks().Payloads()), ShouldEqual, 0)
		})
		Convey(`successful chromium cv full_run is processed`, func() {
			ctx, skdr := tq.TestingContext(ctx, nil)
			rID := "id_full_run"
			fID := fullRunID(rID)
			run := &cvv0.Run{
				Id:         fID,
				Mode:       "FULL_RUN",
				CreateTime: timestamppb.New(clock.Now(ctx)),
				Tryjobs: []*cvv0.Tryjob{
					tryjob(1),
					tryjob(2),
				},
			}
			runs := map[string]*cvv0.Run{
				fID: run,
			}
			ctx = cv.UseFakeClient(ctx, runs)
			r := &http.Request{Body: makeCVChromiumRunReq(fID)}
			processed, err := cvPubSubHandlerImpl(ctx, r)
			So(err, ShouldBeNil)
			So(processed, ShouldBeTrue)
			// one ingestion task per tryjob
			So(len(skdr.Tasks().Payloads()), ShouldEqual, 2)
			actTasks := make([]*taskspb.IngestTestResults, 0, len(skdr.Tasks().Payloads()))
			for _, pl := range skdr.Tasks().Payloads() {
				actTasks = append(actTasks, pl.(*taskspb.IngestTestResults))
			}
			// scheduling order is not deterministic; compare sorted
			So(sortTasks(actTasks), ShouldResembleProto, sortTasks(expectedTasks(run)))
		})
		Convey(`partial success`, func() {
			ctx, skdr := tq.TestingContext(ctx, nil)
			rID := "id_with_invalid_result"
			fID := fullRunID(rID)
			// second tryjob has an empty Result, which the handler cannot classify
			run := &cvv0.Run{
				Id:         fID,
				Mode:       "FULL_RUN",
				CreateTime: timestamppb.New(clock.Now(ctx)),
				Tryjobs: []*cvv0.Tryjob{
					tryjob(1),
					{
						Result: &cvv0.Tryjob_Result{},
					},
				},
			}
			runs := map[string]*cvv0.Run{
				fID: run,
			}
			ctx = cv.UseFakeClient(ctx, runs)
			r := &http.Request{Body: makeCVChromiumRunReq(fID)}
			processed, err := cvPubSubHandlerImpl(ctx, r)
			So(err, ShouldErrLike, "unrecognized CV run try job result")
			So(processed, ShouldBeTrue)
			// only the valid tryjob produced a task
			So(len(skdr.Tasks().Payloads()), ShouldEqual, 1)
			So(skdr.Tasks().Payloads()[0].(*taskspb.IngestTestResults), ShouldResembleProto, expectedTasks(run)[0])
		})
	})
}
explode_data.jsonl/70801
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1398 }
[ 2830, 3393, 6999, 19589, 6727, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 20985, 1669, 1273, 1314, 8787, 287, 1972, 2822, 93070, 5617, 5809, 2271, 14165, 6727, 29162, 3136, 3050, 7808, 259, 11, 2915, 368, 341, 197, 93070, 561...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSendError(t *testing.T) { var resp string ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.WriteHeader(http.StatusInternalServerError) w.Header().Set("Content-Type", "application/json") w.Write([]byte(resp)) })) defer ts.Close() ctx := context.Background() client, err := NewClient(ctx, testMessagingConfig) if err != nil { t.Fatal(err) } client.fcmEndpoint = ts.URL client.fcmClient.httpClient.RetryConfig = nil for idx, tc := range httpErrors { resp = tc.resp name, err := client.Send(ctx, &Message{Topic: "topic"}) if err == nil || err.Error() != tc.want || !tc.check(err) { t.Errorf("Send(%d) = (%q, %v); want = (%q, %q)", idx, name, err, "", tc.want) } } }
explode_data.jsonl/70176
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 297 }
[ 2830, 3393, 11505, 1454, 1155, 353, 8840, 836, 8, 341, 2405, 9039, 914, 198, 57441, 1669, 54320, 70334, 7121, 5475, 19886, 89164, 18552, 3622, 1758, 37508, 11, 435, 353, 1254, 9659, 8, 341, 197, 6692, 69794, 19886, 66760, 340, 197, 6692...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_setFilter(t *testing.T) { testcases := []struct { answer string want string }{ {answer: "example", want: "Filters: ok"}, {answer: "", want: "Filters: regular expression cleared"}, {answer: "\n", want: "Filters: regular expression cleared"}, {answer: "[0-", want: "Filters: error parsing regexp: missing closing ]: `[0-`"}, } config := newConfig() config.view = config.views["activity"] config.view.OrderKey = 0 for _, tc := range testcases { assert.Equal(t, tc.want, setFilter(tc.answer, config.view)) } }
explode_data.jsonl/69227
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 194 }
[ 2830, 3393, 2602, 5632, 1155, 353, 8840, 836, 8, 341, 18185, 23910, 1669, 3056, 1235, 341, 197, 72570, 914, 198, 197, 50780, 256, 914, 198, 197, 59403, 197, 197, 90, 9217, 25, 330, 8687, 497, 1366, 25, 330, 28351, 25, 5394, 7115, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestCrMergedMakeFilesInRenamedDir(t *testing.T) { test(t, users("alice", "bob"), as(alice, mkdir("a/b"), ), as(bob, disableUpdates(), ), as(alice, write("a/b/c", "hello"), write("a/b/d", "goodbye"), ), as(bob, noSync(), rename("a/b", "b"), reenableUpdates(), lsdir("a", m{}), lsdir("b", m{"c": "FILE", "d": "FILE"}), read("b/c", "hello"), read("b/d", "goodbye"), ), as(alice, lsdir("a", m{}), lsdir("b", m{"c": "FILE", "d": "FILE"}), read("b/c", "hello"), read("b/d", "goodbye"), ), ) }
explode_data.jsonl/31372
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 304 }
[ 2830, 3393, 16001, 44, 51525, 8078, 10809, 641, 34625, 3606, 6184, 1155, 353, 8840, 836, 8, 341, 18185, 1155, 345, 197, 90896, 445, 63195, 497, 330, 47086, 4461, 197, 60451, 17643, 558, 345, 298, 88650, 445, 64, 3470, 4461, 197, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestHookFileWillBeUploaded(t *testing.T) { t.Run("rejected", func(t *testing.T) { th := Setup(t).InitBasic() defer th.TearDown() var mockAPI plugintest.API mockAPI.On("LoadPluginConfiguration", mock.Anything).Return(nil) mockAPI.On("LogDebug", "testhook.txt").Return(nil) mockAPI.On("LogDebug", "inputfile").Return(nil) tearDown, _, _ := SetAppEnvironmentWithPlugins(t, []string{ ` package main import ( "io" "github.com/blastbao/mattermost-server/plugin" "github.com/blastbao/mattermost-server/model" ) type MyPlugin struct { plugin.MattermostPlugin } func (p *MyPlugin) FileWillBeUploaded(c *plugin.Context, info *model.FileInfo, file io.Reader, output io.Writer) (*model.FileInfo, string) { return nil, "rejected" } func main() { plugin.ClientMain(&MyPlugin{}) } `, }, th.App, func(*model.Manifest) plugin.API { return &mockAPI }) defer tearDown() _, err := th.App.UploadFiles( "noteam", th.BasicChannel.Id, th.BasicUser.Id, []io.ReadCloser{ioutil.NopCloser(bytes.NewBufferString("inputfile"))}, []string{"testhook.txt"}, []string{}, time.Now(), ) if assert.NotNil(t, err) { assert.Equal(t, "File rejected by plugin. 
rejected", err.Message) } }) t.Run("rejected, returned file ignored", func(t *testing.T) { th := Setup(t).InitBasic() defer th.TearDown() var mockAPI plugintest.API mockAPI.On("LoadPluginConfiguration", mock.Anything).Return(nil) mockAPI.On("LogDebug", "testhook.txt").Return(nil) mockAPI.On("LogDebug", "inputfile").Return(nil) tearDown, _, _ := SetAppEnvironmentWithPlugins(t, []string{ ` package main import ( "fmt" "io" "github.com/blastbao/mattermost-server/plugin" "github.com/blastbao/mattermost-server/model" ) type MyPlugin struct { plugin.MattermostPlugin } func (p *MyPlugin) FileWillBeUploaded(c *plugin.Context, info *model.FileInfo, file io.Reader, output io.Writer) (*model.FileInfo, string) { n, err := output.Write([]byte("ignored")) if err != nil { return info, fmt.Sprintf("FAILED to write output file n: %v, err: %v", n, err) } info.Name = "ignored" return info, "rejected" } func main() { plugin.ClientMain(&MyPlugin{}) } `, }, th.App, func(*model.Manifest) plugin.API { return &mockAPI }) defer tearDown() _, err := th.App.UploadFiles( "noteam", th.BasicChannel.Id, th.BasicUser.Id, []io.ReadCloser{ioutil.NopCloser(bytes.NewBufferString("inputfile"))}, []string{"testhook.txt"}, []string{}, time.Now(), ) if assert.NotNil(t, err) { assert.Equal(t, "File rejected by plugin. 
rejected", err.Message) } }) t.Run("allowed", func(t *testing.T) { th := Setup(t).InitBasic() defer th.TearDown() var mockAPI plugintest.API mockAPI.On("LoadPluginConfiguration", mock.Anything).Return(nil) mockAPI.On("LogDebug", "testhook.txt").Return(nil) mockAPI.On("LogDebug", "inputfile").Return(nil) tearDown, _, _ := SetAppEnvironmentWithPlugins(t, []string{ ` package main import ( "io" "github.com/blastbao/mattermost-server/plugin" "github.com/blastbao/mattermost-server/model" ) type MyPlugin struct { plugin.MattermostPlugin } func (p *MyPlugin) FileWillBeUploaded(c *plugin.Context, info *model.FileInfo, file io.Reader, output io.Writer) (*model.FileInfo, string) { return nil, "" } func main() { plugin.ClientMain(&MyPlugin{}) } `, }, th.App, func(*model.Manifest) plugin.API { return &mockAPI }) defer tearDown() response, err := th.App.UploadFiles( "noteam", th.BasicChannel.Id, th.BasicUser.Id, []io.ReadCloser{ioutil.NopCloser(bytes.NewBufferString("inputfile"))}, []string{"testhook.txt"}, []string{}, time.Now(), ) assert.Nil(t, err) assert.NotNil(t, response) assert.Equal(t, 1, len(response.FileInfos)) fileId := response.FileInfos[0].Id fileInfo, err := th.App.GetFileInfo(fileId) assert.Nil(t, err) assert.NotNil(t, fileInfo) assert.Equal(t, "testhook.txt", fileInfo.Name) fileReader, err := th.App.FileReader(fileInfo.Path) assert.Nil(t, err) var resultBuf bytes.Buffer io.Copy(&resultBuf, fileReader) assert.Equal(t, "inputfile", resultBuf.String()) }) t.Run("updated", func(t *testing.T) { th := Setup(t).InitBasic() defer th.TearDown() var mockAPI plugintest.API mockAPI.On("LoadPluginConfiguration", mock.Anything).Return(nil) mockAPI.On("LogDebug", "testhook.txt").Return(nil) mockAPI.On("LogDebug", "inputfile").Return(nil) tearDown, _, _ := SetAppEnvironmentWithPlugins(t, []string{ ` package main import ( "io" "fmt" "bytes" "github.com/blastbao/mattermost-server/plugin" "github.com/blastbao/mattermost-server/model" ) type MyPlugin struct { plugin.MattermostPlugin 
} func (p *MyPlugin) FileWillBeUploaded(c *plugin.Context, info *model.FileInfo, file io.Reader, output io.Writer) (*model.FileInfo, string) { var buf bytes.Buffer n, err := buf.ReadFrom(file) if err != nil { panic(fmt.Sprintf("buf.ReadFrom failed, reading %d bytes: %s", err.Error())) } outbuf := bytes.NewBufferString("changedtext") n, err = io.Copy(output, outbuf) if err != nil { panic(fmt.Sprintf("io.Copy failed after %d bytes: %s", n, err.Error())) } if n != 11 { panic(fmt.Sprintf("io.Copy only copied %d bytes", n)) } info.Name = "modifiedinfo" return info, "" } func main() { plugin.ClientMain(&MyPlugin{}) } `, }, th.App, func(*model.Manifest) plugin.API { return &mockAPI }) defer tearDown() response, err := th.App.UploadFiles( "noteam", th.BasicChannel.Id, th.BasicUser.Id, []io.ReadCloser{ioutil.NopCloser(bytes.NewBufferString("inputfile"))}, []string{"testhook.txt"}, []string{}, time.Now(), ) assert.Nil(t, err) assert.NotNil(t, response) assert.Equal(t, 1, len(response.FileInfos)) fileId := response.FileInfos[0].Id fileInfo, err := th.App.GetFileInfo(fileId) assert.Nil(t, err) assert.NotNil(t, fileInfo) assert.Equal(t, "modifiedinfo", fileInfo.Name) fileReader, err := th.App.FileReader(fileInfo.Path) assert.Nil(t, err) var resultBuf bytes.Buffer io.Copy(&resultBuf, fileReader) assert.Equal(t, "changedtext", resultBuf.String()) }) }
explode_data.jsonl/30307
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2723 }
[ 2830, 3393, 31679, 1703, 9945, 3430, 62061, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 95353, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 70479, 1669, 18626, 1155, 568, 3803, 15944, 741, 197, 16867, 270, 836, 682, 4454, 2822, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIssue6081(t *testing.T) { db := newTestDB(t, "people") defer closeDB(t, db) drv := db.Driver().(*fakeDriver) drv.mu.Lock() opens0 := drv.openCount closes0 := drv.closeCount drv.mu.Unlock() stmt, err := db.Prepare("SELECT|people|name|") if err != nil { t.Fatal(err) } setRowsCloseHook(func(rows *Rows, err *error) { *err = driver.ErrBadConn }) defer setRowsCloseHook(nil) for i := 0; i < 10; i++ { rows, err := stmt.Query() if err != nil { t.Fatal(err) } rows.Close() } if n := len(stmt.css); n > 1 { t.Errorf("len(css slice) = %d; want <= 1", n) } stmt.Close() if n := len(stmt.css); n != 0 { t.Errorf("len(css slice) after Close = %d; want 0", n) } drv.mu.Lock() opens := drv.openCount - opens0 closes := drv.closeCount - closes0 drv.mu.Unlock() if opens < 9 { t.Errorf("opens = %d; want >= 9", opens) } if closes < 9 { t.Errorf("closes = %d; want >= 9", closes) } }
explode_data.jsonl/16022
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 434 }
[ 2830, 3393, 42006, 21, 15, 23, 16, 1155, 353, 8840, 836, 8, 341, 20939, 1669, 501, 2271, 3506, 1155, 11, 330, 16069, 1138, 16867, 3265, 3506, 1155, 11, 2927, 692, 2698, 10553, 1669, 2927, 41768, 1005, 4071, 30570, 11349, 340, 2698, 10...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_isTest(t *testing.T) { t.Parallel() type args struct { s string } tests := []struct { name string args args want bool }{ { name: "appveyor", args: args{ s: "appveyor", }, want: true, }, { name: "circleci", args: args{ s: "circleci", }, want: true, }, { name: "jenkins", args: args{ s: "jenkins", }, want: true, }, { name: "e2e", args: args{ s: "e2e", }, want: true, }, { name: "github-actions", args: args{ s: "github-actions", }, want: true, }, { name: "mergeable", args: args{ s: "mergeable", }, want: true, }, { name: "packit-as-a-service", args: args{ s: "packit-as-a-service", }, want: true, }, { name: "semaphoreci", args: args{ s: "semaphoreci", }, want: true, }, { name: "test", args: args{ s: "test", }, want: true, }, { name: "travis-ci", args: args{ s: "travis-ci", }, want: true, }, { name: "non-existing", args: args{ s: "non-existing", }, want: false, }, } for _, tt := range tests { tt := tt t.Run(tt.name, func(t *testing.T) { t.Parallel() if got := isTest(tt.args.s); got != tt.want { t.Errorf("isTest() = %v, want %v for test %v", got, tt.want, tt.name) } }) } }
explode_data.jsonl/77207
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 761 }
[ 2830, 3393, 6892, 2271, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 13158, 2827, 2036, 341, 197, 1903, 914, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 31215, 2827, 198, 197, 50780, 1807, 198, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func Test_JsonFieldWithDefault_WhenSerializedToJson_DeserializesAsEqual(t *testing.T) { t.Parallel() parameters := gopter.DefaultTestParameters() parameters.MaxSize = 10 properties := gopter.NewProperties(parameters) properties.Property( "Round trip of JsonFieldWithDefault via JSON returns original", prop.ForAll(RunJSONSerializationTestForJsonFieldWithDefault, JsonFieldWithDefaultGenerator())) properties.TestingRun(t, gopter.NewFormatedReporter(true, 240, os.Stdout)) }
explode_data.jsonl/39629
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 154 }
[ 2830, 3393, 62, 5014, 1877, 2354, 3675, 62, 4498, 77521, 78967, 98054, 2848, 4756, 2121, 2993, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 67543, 1669, 728, 73137, 13275, 2271, 9706, 741, 67543, 14535, 1695, 284, 220, 16, 15, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestConstantTimeLessOrEq(t *testing.T) { for i, test := range lessOrEqTests { result := ConstantTimeLessOrEq(test.x, test.y) if result != test.result { t.Errorf("#%d: %d <= %d gave %d, expected %d", i, test.x, test.y, result, test.result) } } }
explode_data.jsonl/79471
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 113 }
[ 2830, 3393, 15472, 1462, 27451, 2195, 27312, 1155, 353, 8840, 836, 8, 341, 2023, 600, 11, 1273, 1669, 2088, 2686, 2195, 27312, 18200, 341, 197, 9559, 1669, 19305, 1462, 27451, 2195, 27312, 8623, 1993, 11, 1273, 2384, 340, 197, 743, 1102...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestCVTGray(t *testing.T) { sources := GrayPaths targets := CVTPaths for index, source := range sources { if index >= len(targets) { return } img, err := imagex.LoadImage(RunningDir+"/"+source, formatx.Auto) if nil != err { fmt.Println(err) continue } fmt.Println("读取的图像内存类型(img):", reflect.ValueOf(img).Type()) dstImg := imagex.CopyImageStruct(img) err = CVTGray(img, dstImg, 42767) if nil != err { fmt.Println(err) continue } err = imagex.SaveImage(dstImg, RunningDir+"/"+targets[index], formatx.Auto, nil) if nil != err { fmt.Println(err) } } }
explode_data.jsonl/63317
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 290 }
[ 2830, 3393, 19589, 51, 28174, 1155, 353, 8840, 836, 8, 341, 1903, 2360, 1669, 23366, 26901, 198, 28861, 82, 1669, 14165, 51, 26901, 198, 2023, 1922, 11, 2530, 1669, 2088, 8173, 341, 197, 743, 1922, 2604, 2422, 85120, 8, 341, 298, 853,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func Test_Service_ErrorEcho(t *testing.T) { bkoff := backoff.NewExponentialBackOff() bkoff.MaxElapsedTime = time.Second * 15 err := backoff.Retry(func() error { logger, _ := test.NewNullLogger() logger.SetLevel(logrus.DebugLevel) c := service.NewContainer(logger) c.Register(ID, &Service{}) err := c.Init(&testCfg{httpCfg: `{ "enable": true, "address": ":6030", "maxRequestSize": 1024, "uploads": { "dir": ` + tmpDir() + `, "forbid": [] }, "workers":{ "command": "php ../../tests/http/client.php echoerr pipes", "relay": "pipes", "pool": { "numWorkers": 1, "allocateTimeout": 10000000, "destroyTimeout": 10000000 } } }`}) if err != nil { return err } s, st := c.Get(ID) assert.NotNil(t, s) assert.Equal(t, service.StatusOK, st) goterr := make(chan interface{}) s.(*Service).AddListener(func(event int, ctx interface{}) { if event == roadrunner.EventStderrOutput { if string(ctx.([]byte)) == "WORLD\n" { goterr <- nil } } }) go func() { err := c.Serve() if err != nil { t.Errorf("serve error: %v", err) } }() time.Sleep(time.Millisecond * 500) req, err := http.NewRequest("GET", "http://localhost:6030?hello=world", nil) if err != nil { c.Stop() return err } r, err := http.DefaultClient.Do(req) if err != nil { c.Stop() return err } b, err := ioutil.ReadAll(r.Body) if err != nil { c.Stop() return err } <-goterr assert.Equal(t, 201, r.StatusCode) assert.Equal(t, "WORLD", string(b)) err = r.Body.Close() if err != nil { c.Stop() return err } c.Stop() return nil }, bkoff) if err != nil { t.Fatal(err) } }
explode_data.jsonl/34500
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 806 }
[ 2830, 3393, 52548, 28651, 74994, 1155, 353, 8840, 836, 8, 341, 2233, 74, 1847, 1669, 1182, 1847, 7121, 840, 59825, 3707, 4596, 741, 2233, 74, 1847, 14535, 98483, 284, 882, 32435, 353, 220, 16, 20, 271, 9859, 1669, 1182, 1847, 2013, 15...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestNegateNonBool(t *testing.T) { fg := newFlagGroup() f := fg.Flag("b", "") f.Int() fg.init() tokens := tokenize([]string{"--no-b"}) _, err := fg.parse(tokens) assert.Error(t, err) }
explode_data.jsonl/74859
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 93 }
[ 2830, 3393, 47800, 349, 8121, 11233, 1155, 353, 8840, 836, 8, 341, 1166, 70, 1669, 501, 12135, 2808, 741, 1166, 1669, 29799, 80911, 445, 65, 497, 14676, 1166, 7371, 741, 1166, 70, 8271, 741, 3244, 9713, 1669, 77651, 10556, 917, 4913, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestEphemeralVolumeOwnerCheck(t *testing.T) { defer featuregatetesting.SetFeatureGateDuringTest(t, utilfeature.DefaultFeatureGate, features.GenericEphemeralVolume, true)() // create dswp pod, pv, pvc := createEphemeralVolumeObjects("dswp-test-pod", "dswp-test-volume-name", false /* not owned */) dswp, fakePodManager, _, _, _ := createDswpWithVolume(t, pv, pvc) fakePodManager.AddPod(pod) podName := util.GetUniquePodName(pod) dswp.findAndAddNewPods() if dswp.pods.processedPods[podName] { t.Fatalf("%s should not have been processed by the populator", podName) } require.Equal(t, []string{fmt.Sprintf("error processing PVC %s/%s: not the ephemeral PVC for the pod", pvc.Namespace, pvc.Name)}, dswp.desiredStateOfWorld.PopPodErrors(podName), ) }
explode_data.jsonl/45751
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 293 }
[ 2830, 3393, 36, 59941, 3253, 18902, 13801, 3973, 1155, 353, 8840, 836, 8, 341, 16867, 4565, 70, 266, 57824, 287, 4202, 13859, 42318, 16014, 2271, 1155, 11, 4094, 12753, 13275, 13859, 42318, 11, 4419, 4341, 36, 59941, 3253, 18902, 11, 83...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestCheckCreateAliasWithProofs(t *testing.T) { to, path := createCheckerTestObjects(t) defer func() { to.stor.close(t) err := common.CleanTemporaryDirs(path) assert.NoError(t, err, "failed to clean test data dirs") }() tx := createCreateAliasWithProofs(t) info := defaultCheckerInfo(t) _, err := to.tc.checkCreateAliasWithProofs(tx, info) assert.Error(t, err, "checkCreateAliasWithProofs did not fail prior to SmartAccounts activation") to.stor.activateFeature(t, int16(settings.SmartAccounts)) _, err = to.tc.checkCreateAliasWithProofs(tx, info) assert.NoError(t, err, "checkCreateAliasWithProofs failed with valid createAlias tx") to.stor.addBlock(t, blockID0) err = to.tp.performCreateAliasWithProofs(tx, defaultPerformerInfo(t)) assert.NoError(t, err, "performCreateAliasWithProofs failed") to.stor.flush(t) _, err = to.tc.checkCreateAliasWithProofs(tx, info) assert.Error(t, err, "checkCreateAliasWithProofs did not fail when using alias which is alredy taken") // Check that checker allows to steal aliases at specified timestamp window on MainNet. info.currentTimestamp = settings.MainNetSettings.StolenAliasesWindowTimeStart _, err = to.tc.checkCreateAliasWithProofs(tx, info) assert.NoError(t, err, "checkCreateAliasWithSig failed when stealing aliases is allowed") }
explode_data.jsonl/63093
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 441 }
[ 2830, 3393, 3973, 4021, 22720, 2354, 31076, 82, 1155, 353, 8840, 836, 8, 341, 31709, 11, 1815, 1669, 1855, 35188, 2271, 11543, 1155, 692, 16867, 2915, 368, 341, 197, 31709, 1236, 269, 4653, 1155, 692, 197, 9859, 1669, 4185, 727, 2675, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_ApplyConfigMap(t *testing.T) { namespace := "some-namespace" name := "some-name" currentConfigMap := corev1.ConfigMap{ ObjectMeta: metav1.ObjectMeta{Name: name}, Data: map[string]string{"key1": "value1", "key2": "value2"}, } desiredConfigMap := corev1.ConfigMap{ ObjectMeta: metav1.ObjectMeta{Name: name}, Data: map[string]string{"key2": "value2changed", "key3": "value3"}, } t.Run("Patch not existing config-map", func(t *testing.T) { t.Parallel() testEnv := getConfigMapTestEnv() err := testEnv.kubeUtil.ApplyConfigMap(namespace, &currentConfigMap, &desiredConfigMap) assert.NotNil(t, err) assert.Equal(t, "failed to patch config-map object: configmaps \"some-name\" not found", err.Error()) }) t.Run("Patch existing config-map", func(t *testing.T) { t.Parallel() testEnv := getConfigMapTestEnv() namespace := "some-namespace" name := "some-name" _, _ = testEnv.kubeclient.CoreV1().ConfigMaps(namespace).Create(context.TODO(), &currentConfigMap, metav1.CreateOptions{}) err := testEnv.kubeUtil.ApplyConfigMap(namespace, &currentConfigMap, &desiredConfigMap) configMap, err := testEnv.kubeUtil.GetConfigMap(namespace, name) assert.Nil(t, err) assert.Equal(t, name, configMap.ObjectMeta.Name) assert.Equal(t, namespace, configMap.ObjectMeta.Namespace) assert.True(t, radixutils.EqualStringMaps(desiredConfigMap.Data, configMap.Data)) }) }
explode_data.jsonl/55316
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 548 }
[ 2830, 3393, 36117, 398, 2648, 2227, 1155, 353, 8840, 836, 8, 341, 56623, 1669, 330, 14689, 12, 2231, 698, 11609, 1669, 330, 14689, 11494, 698, 20121, 2648, 2227, 1669, 6200, 85, 16, 10753, 2227, 515, 197, 23816, 12175, 25, 77520, 16, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMergeJoinerMultiBatch(t *testing.T) { ctx := context.Background() for _, groupSize := range []int{1, 2, coldata.BatchSize / 4, coldata.BatchSize / 2} { for _, numInputBatches := range []int{1, 2, 16} { for _, outBatchSize := range []uint16{1, 16, coldata.BatchSize} { t.Run(fmt.Sprintf("groupSize=%d/numInputBatches=%d", groupSize, numInputBatches), func(t *testing.T) { nTuples := coldata.BatchSize * numInputBatches typs := []types.T{types.Int64} cols := []coldata.Vec{coldata.NewMemColumn(typs[0], nTuples)} groups := cols[0].Int64() for i := range groups { groups[i] = int64(i) } leftSource := newChunkingBatchSource(typs, cols, uint64(nTuples)) rightSource := newChunkingBatchSource(typs, cols, uint64(nTuples)) a, err := NewMergeJoinOp( sqlbase.InnerJoin, leftSource, rightSource, []uint32{0}, []uint32{0}, typs, typs, []distsqlpb.Ordering_Column{{ColIdx: 0, Direction: distsqlpb.Ordering_Column_ASC}}, []distsqlpb.Ordering_Column{{ColIdx: 0, Direction: distsqlpb.Ordering_Column_ASC}}, ) if err != nil { t.Fatal("Error in merge join op constructor", err) } a.(*mergeJoinOp).initWithBatchSize(outBatchSize) i := 0 count := 0 // Keep track of the last comparison value. lastVal := int64(0) for b := a.Next(ctx); b.Length() != 0; b = a.Next(ctx) { count += int(b.Length()) outCol := b.ColVec(0).Int64() for j := int64(0); j < int64(b.Length()); j++ { outVal := outCol[j] expVal := lastVal if outVal != expVal { t.Fatalf("Found val %d, expected %d, idx %d of batch %d", outVal, expVal, j, i) } lastVal++ } i++ } if count != nTuples { t.Fatalf("Found count %d, expected count %d", count, nTuples) } }) } } } }
explode_data.jsonl/16867
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 976 }
[ 2830, 3393, 52096, 12292, 261, 20358, 21074, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 741, 2023, 8358, 1874, 1695, 1669, 2088, 3056, 396, 90, 16, 11, 220, 17, 11, 1375, 691, 45791, 1695, 608, 220, 19, 11, 1375, 691, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestAccountDetail(t *testing.T) { hmock := httptest.NewClient() client := &Client{ HorizonURL: "https://localhost/", HTTP: hmock, } // no parameters accountRequest := AccountRequest{} hmock.On( "GET", "https://localhost/accounts/GCLWGQPMKXQSPF776IU33AH4PZNOOWNAWGGKVTBQMIC5IMKUNP3E6NVU", ).ReturnString(200, accountResponse) _, err := client.AccountDetail(accountRequest) // error case: no account id if assert.Error(t, err) { assert.Contains(t, err.Error(), "No account ID provided") } // wrong parameters accountRequest = AccountRequest{DataKey: "test"} hmock.On( "GET", "https://localhost/accounts/GCLWGQPMKXQSPF776IU33AH4PZNOOWNAWGGKVTBQMIC5IMKUNP3E6NVU", ).ReturnString(200, accountResponse) _, err = client.AccountDetail(accountRequest) // error case: no account id if assert.Error(t, err) { assert.Contains(t, err.Error(), "No account ID provided") } accountRequest = AccountRequest{AccountId: "GCLWGQPMKXQSPF776IU33AH4PZNOOWNAWGGKVTBQMIC5IMKUNP3E6NVU"} // happy path hmock.On( "GET", "https://localhost/accounts/GCLWGQPMKXQSPF776IU33AH4PZNOOWNAWGGKVTBQMIC5IMKUNP3E6NVU", ).ReturnString(200, accountResponse) account, err := client.AccountDetail(accountRequest) if assert.NoError(t, err) { assert.Equal(t, account.ID, "GCLWGQPMKXQSPF776IU33AH4PZNOOWNAWGGKVTBQMIC5IMKUNP3E6NVU") assert.Equal(t, account.PT, "1") assert.Equal(t, account.Signers[0].Key, "GCLWGQPMKXQSPF776IU33AH4PZNOOWNAWGGKVTBQMIC5IMKUNP3E6NVU") assert.Equal(t, account.Signers[0].Type, "ed25519_public_key") assert.Equal(t, account.Data["test"], "dGVzdA==") balance, err := account.GetNativeBalance() assert.Nil(t, err) assert.Equal(t, balance, "9999.9999900") } // failure response hmock.On( "GET", "https://localhost/accounts/GCLWGQPMKXQSPF776IU33AH4PZNOOWNAWGGKVTBQMIC5IMKUNP3E6NVU", ).ReturnString(404, notFoundResponse) account, err = client.AccountDetail(accountRequest) if assert.Error(t, err) { assert.Contains(t, err.Error(), "Horizon error") horizonError, ok := err.(*Error) assert.Equal(t, ok, true) 
assert.Equal(t, horizonError.Problem.Title, "Resource Missing") } // connection error hmock.On( "GET", "https://localhost/accounts/GCLWGQPMKXQSPF776IU33AH4PZNOOWNAWGGKVTBQMIC5IMKUNP3E6NVU", ).ReturnError("http.Client error") _, err = client.AccountDetail(accountRequest) if assert.Error(t, err) { assert.Contains(t, err.Error(), "http.Client error") _, ok := err.(*Error) assert.Equal(t, ok, false) } }
explode_data.jsonl/34851
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1083 }
[ 2830, 3393, 7365, 10649, 1155, 353, 8840, 836, 8, 341, 9598, 16712, 1669, 54320, 70334, 7121, 2959, 741, 25291, 1669, 609, 2959, 515, 197, 13292, 269, 16973, 3144, 25, 330, 2428, 1110, 8301, 35075, 197, 197, 9230, 25, 981, 305, 16712, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestReadPublicKey(t *testing.T) { type test struct { caseDesc string inputFile string errorFound bool } tests := []test{ {caseDesc: "Not a valid public key file", inputFile: "testdata/hello_world.txt.minisig", errorFound: true}, {caseDesc: "Valid public key (minisign)", inputFile: "testdata/minisign.pub", errorFound: false}, {caseDesc: "Valid public key (signify)", inputFile: "testdata/signify.pub", errorFound: false}, } for _, tc := range tests { file, err := os.Open(tc.inputFile) if err != nil { t.Errorf("%v: cannot open %v", tc.caseDesc, tc.inputFile) } if got, err := NewPublicKey(file); ((got != nil) == tc.errorFound) || ((err != nil) != tc.errorFound) { t.Errorf("%v: unexpected result testing %v: %v", tc.caseDesc, tc.inputFile, err) } } }
explode_data.jsonl/24853
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 301 }
[ 2830, 3393, 4418, 61822, 1155, 353, 8840, 836, 8, 341, 13158, 1273, 2036, 341, 197, 2722, 11065, 256, 914, 198, 197, 22427, 1703, 220, 914, 198, 197, 18290, 6650, 1807, 198, 197, 630, 78216, 1669, 3056, 1944, 515, 197, 197, 90, 5638, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestRecentlyTriedPodsGoBack(t *testing.T) { c := clock.NewFakeClock(time.Now()) q := NewTestQueue(context.Background(), newDefaultQueueSort(), WithClock(c)) // Add a few pods to priority queue. for i := 0; i < 5; i++ { p := v1.Pod{ ObjectMeta: metav1.ObjectMeta{ Name: fmt.Sprintf("test-pod-%v", i), Namespace: "ns1", UID: types.UID(fmt.Sprintf("tp00%v", i)), }, Spec: v1.PodSpec{ Priority: &highPriority, }, Status: v1.PodStatus{ NominatedNodeName: "node1", }, } q.Add(&p) } c.Step(time.Microsecond) // Simulate a pod being popped by the scheduler, determined unschedulable, and // then moved back to the active queue. p1, err := q.Pop() if err != nil { t.Errorf("Error while popping the head of the queue: %v", err) } // Update pod condition to unschedulable. podutil.UpdatePodCondition(&p1.PodInfo.Pod.Status, &v1.PodCondition{ Type: v1.PodScheduled, Status: v1.ConditionFalse, Reason: v1.PodReasonUnschedulable, Message: "fake scheduling failure", LastProbeTime: metav1.Now(), }) // Put in the unschedulable queue. q.AddUnschedulableIfNotPresent(p1, q.SchedulingCycle()) c.Step(DefaultPodInitialBackoffDuration) // Move all unschedulable pods to the active queue. q.MoveAllToActiveOrBackoffQueue(UnschedulableTimeout, nil) // Simulation is over. Now let's pop all pods. The pod popped first should be // the last one we pop here. for i := 0; i < 5; i++ { p, err := q.Pop() if err != nil { t.Errorf("Error while popping pods from the queue: %v", err) } if (i == 4) != (p1 == p) { t.Errorf("A pod tried before is not the last pod popped: i: %v, pod name: %v", i, p.PodInfo.Pod.Name) } } }
explode_data.jsonl/68198
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 716 }
[ 2830, 3393, 45137, 51, 4487, 23527, 82, 10850, 3707, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 8866, 7121, 52317, 26104, 9730, 13244, 2398, 18534, 1669, 1532, 2271, 7554, 5378, 19047, 1507, 501, 3675, 7554, 10231, 1507, 3085, 26104, 1337,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestWebSocketTLS(t *testing.T) { testSrv := getTestServer(t, "127.0.0.1:8000") defer testSrv.Close() cert := base64.StdEncoding.EncodeToString([]byte(certPem)) key := base64.StdEncoding.EncodeToString([]byte(keyPem)) wsYaml := ` kind: WebSocketServer name: websocket-demo port: 10081 https: true backend: wss://127.0.0.1:8000 certBase64: %v keyBase64: %v wssCertBase64: %v wssKeyBase64: %v ` wsYaml = fmt.Sprintf(wsYaml, cert, key, cert, key) super := supervisor.NewDefaultMock() superSpec, err := super.NewSpec(wsYaml) require.Nil(t, err) ws := &WebSocketServer{} ws.Init(superSpec) assert.Nil(t, ws.Validate()) time.Sleep(50 * time.Millisecond) ws.Close() }
explode_data.jsonl/65816
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 294 }
[ 2830, 3393, 61238, 45439, 1155, 353, 8840, 836, 8, 341, 18185, 50, 10553, 1669, 633, 2271, 5475, 1155, 11, 330, 16, 17, 22, 13, 15, 13, 15, 13, 16, 25, 23, 15, 15, 15, 1138, 16867, 1273, 50, 10553, 10421, 2822, 1444, 529, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMessageToQueryParametersRecursive(t *testing.T) { type test struct { MsgDescs []*descriptorpb.DescriptorProto Message string } tests := []test{ // First test: // Here we test that a message that references it self through a field will return an error. // Example proto: // message DirectRecursiveMessage { // DirectRecursiveMessage nested = 1; // } { MsgDescs: []*descriptorpb.DescriptorProto{ { Name: proto.String("DirectRecursiveMessage"), Field: []*descriptorpb.FieldDescriptorProto{ { Name: proto.String("nested"), Label: descriptorpb.FieldDescriptorProto_LABEL_OPTIONAL.Enum(), Type: descriptorpb.FieldDescriptorProto_TYPE_MESSAGE.Enum(), TypeName: proto.String(".example.DirectRecursiveMessage"), Number: proto.Int32(1), }, }, }, }, Message: "DirectRecursiveMessage", }, // Second test: // Here we test that a cycle through multiple messages is detected and that an error is returned. // Sample: // message Root { NodeMessage nested = 1; } // message NodeMessage { CycleMessage nested = 1; } // message CycleMessage { Root nested = 1; } { MsgDescs: []*descriptorpb.DescriptorProto{ { Name: proto.String("RootMessage"), Field: []*descriptorpb.FieldDescriptorProto{ { Name: proto.String("nested"), Label: descriptorpb.FieldDescriptorProto_LABEL_OPTIONAL.Enum(), Type: descriptorpb.FieldDescriptorProto_TYPE_MESSAGE.Enum(), TypeName: proto.String(".example.NodeMessage"), Number: proto.Int32(1), }, }, }, { Name: proto.String("NodeMessage"), Field: []*descriptorpb.FieldDescriptorProto{ { Name: proto.String("nested"), Label: descriptorpb.FieldDescriptorProto_LABEL_OPTIONAL.Enum(), Type: descriptorpb.FieldDescriptorProto_TYPE_MESSAGE.Enum(), TypeName: proto.String(".example.CycleMessage"), Number: proto.Int32(1), }, }, }, { Name: proto.String("CycleMessage"), Field: []*descriptorpb.FieldDescriptorProto{ { Name: proto.String("nested"), Label: descriptorpb.FieldDescriptorProto_LABEL_OPTIONAL.Enum(), Type: descriptorpb.FieldDescriptorProto_TYPE_MESSAGE.Enum(), TypeName: 
proto.String(".example.RootMessage"), Number: proto.Int32(1), }, }, }, }, Message: "RootMessage", }, } for _, test := range tests { reg := descriptor.NewRegistry() msgs := []*descriptor.Message{} for _, msgdesc := range test.MsgDescs { msgs = append(msgs, &descriptor.Message{DescriptorProto: msgdesc}) } file := descriptor.File{ FileDescriptorProto: &descriptorpb.FileDescriptorProto{ SourceCodeInfo: &descriptorpb.SourceCodeInfo{}, Name: proto.String("example.proto"), Package: proto.String("example"), Dependency: []string{}, MessageType: test.MsgDescs, Service: []*descriptorpb.ServiceDescriptorProto{}, Options: &descriptorpb.FileOptions{ GoPackage: proto.String("github.com/grpc-ecosystem/grpc-gateway/runtime/internal/examplepb;example"), }, }, GoPkg: descriptor.GoPackage{ Path: "example.com/path/to/example/example.pb", Name: "example_pb", }, Messages: msgs, } err := reg.Load(&pluginpb.CodeGeneratorRequest{ ProtoFile: []*descriptorpb.FileDescriptorProto{file.FileDescriptorProto}, }) if err != nil { t.Fatalf("failed to load code generator request: %v", err) } message, err := reg.LookupMsg("", ".example."+test.Message) if err != nil { t.Fatalf("failed to lookup message: %s", err) } _, err = messageToQueryParameters(message, reg, []descriptor.Parameter{}, nil) if err == nil { t.Fatalf("It should not be allowed to have recursive query parameters") } } }
explode_data.jsonl/32784
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1688 }
[ 2830, 3393, 2052, 1249, 2859, 9706, 78542, 1155, 353, 8840, 836, 8, 341, 13158, 1273, 2036, 341, 197, 197, 6611, 11065, 82, 29838, 53132, 16650, 23548, 6820, 31549, 198, 197, 46733, 220, 914, 198, 197, 630, 78216, 1669, 3056, 1944, 515,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestConnector_AttemptTimeout(t *testing.T) { listener, err := net.Listen("unix", "@1234") require.NoError(t, err) store := newStore(t, []string{listener.Addr().String()}) config := protocol.Config{ AttemptTimeout: 100 * time.Millisecond, RetryLimit: 1, } connector := protocol.NewConnector(0, store, config, logging.Test(t)) conns := []net.Conn{} go func() { conn, err := listener.Accept() require.NoError(t, err) conns = append(conns, conn) }() _, err = connector.Connect(context.Background()) assert.Equal(t, protocol.ErrNoAvailableLeader, err) for _, conn := range conns { conn.Close() } }
explode_data.jsonl/39280
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 237 }
[ 2830, 3393, 35954, 55581, 4213, 7636, 1155, 353, 8840, 836, 8, 341, 14440, 798, 11, 1848, 1669, 4179, 68334, 445, 56646, 497, 8428, 16, 17, 18, 19, 1138, 17957, 35699, 1155, 11, 1848, 692, 57279, 1669, 501, 6093, 1155, 11, 3056, 917, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestConfigLoadConfigurationFailMissingShim(t *testing.T) { tmpdir, err := ioutil.TempDir(testDir, "runtime-config-") assert.NoError(t, err) defer os.RemoveAll(tmpdir) testLoadConfiguration(t, tmpdir, func(config testRuntimeConfig, configFile string, ignoreLogging bool) (bool, error) { expectFail := true shimConfig, ok := config.RuntimeConfig.ShimConfig.(vc.ShimConfig) if !ok { return expectFail, fmt.Errorf("cannot determine shim config") } err = os.Remove(shimConfig.Path) if err != nil { return expectFail, err } return expectFail, nil }) }
explode_data.jsonl/5119
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 222 }
[ 2830, 3393, 2648, 5879, 7688, 19524, 25080, 2016, 318, 1155, 353, 8840, 836, 8, 341, 20082, 3741, 11, 1848, 1669, 43144, 65009, 6184, 8623, 6184, 11, 330, 22255, 25130, 12, 1138, 6948, 35699, 1155, 11, 1848, 340, 16867, 2643, 84427, 103...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestReplaceEdgeReturnOld(t *testing.T) { var ctx context.Context c := createClientFromEnv(t, true) skipBelowVersion(c, "3.4", t) // See https://github.com/arangodb/arangodb/issues/2363 db := ensureDatabase(ctx, c, "edge_test", nil, t) prefix := "replace_edge_returnOld_" g := ensureGraph(ctx, db, prefix+"graph", nil, t) ec := ensureEdgeCollection(ctx, g, prefix+"citiesPerState", []string{prefix + "city"}, []string{prefix + "state"}, t) cities := ensureCollection(ctx, db, prefix+"city", nil, t) states := ensureCollection(ctx, db, prefix+"state", nil, t) from := createDocument(ctx, cities, map[string]interface{}{"name": "Venlo"}, t) to := createDocument(ctx, states, map[string]interface{}{"name": "Limburg"}, t) doc := RouteEdge{ From: from.ID.String(), To: to.ID.String(), Distance: 123, } meta, err := ec.CreateDocument(ctx, doc) if err != nil { t.Fatalf("Failed to create new document: %s", describe(err)) } // Replace document replacement := RouteEdge{ From: to.ID.String(), To: from.ID.String(), Distance: 246, } var old RouteEdge ctx = driver.WithReturnOld(ctx, &old) if _, err := ec.ReplaceDocument(ctx, meta.Key, replacement); err != nil { t.Fatalf("Failed to replace document '%s': %s", meta.Key, describe(err)) } // Check old document if !reflect.DeepEqual(doc, old) { t.Errorf("Got wrong document. Expected %+v, got %+v", doc, old) } }
explode_data.jsonl/26402
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 541 }
[ 2830, 3393, 23107, 11656, 5598, 18284, 1155, 353, 8840, 836, 8, 341, 2405, 5635, 2266, 9328, 198, 1444, 1669, 1855, 2959, 3830, 14359, 1155, 11, 830, 340, 1903, 13389, 38214, 5637, 1337, 11, 330, 18, 13, 19, 497, 259, 8, 442, 3496, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestReExportFSNode(t *testing.T) { default_suite.expectBundled(t, bundled{ files: map[string]string{ "/entry.js": ` export {fs as f} from './foo' export {readFileSync as rfs} from './foo' `, "/foo.js": ` export * as fs from 'fs' export {readFileSync} from 'fs' `, }, entryPaths: []string{"/entry.js"}, options: config.Options{ Mode: config.ModeBundle, AbsOutputFile: "/out.js", Platform: config.PlatformNode, }, }) }
explode_data.jsonl/38501
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 224 }
[ 2830, 3393, 693, 16894, 8485, 1955, 1155, 353, 8840, 836, 8, 341, 11940, 57239, 25952, 33, 1241, 832, 1155, 11, 51450, 515, 197, 74075, 25, 2415, 14032, 30953, 515, 298, 197, 3115, 4085, 2857, 788, 22074, 571, 59440, 314, 3848, 438, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIntArray_Range(t *testing.T) { gtest.C(t, func(t *gtest.T) { value1 := []int{0, 1, 2, 3, 4, 5, 6} array1 := garray.NewIntArrayFrom(value1) array2 := garray.NewIntArrayFrom(value1, true) t.Assert(array1.Range(0, 1), []int{0}) t.Assert(array1.Range(1, 2), []int{1}) t.Assert(array1.Range(0, 2), []int{0, 1}) t.Assert(array1.Range(10, 2), nil) t.Assert(array1.Range(-1, 10), value1) t.Assert(array2.Range(1, 2), []int{1}) }) }
explode_data.jsonl/47599
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 226 }
[ 2830, 3393, 95338, 2568, 844, 1155, 353, 8840, 836, 8, 341, 3174, 1944, 727, 1155, 11, 2915, 1155, 353, 82038, 836, 8, 341, 197, 16309, 16, 1669, 3056, 396, 90, 15, 11, 220, 16, 11, 220, 17, 11, 220, 18, 11, 220, 19, 11, 220, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBuildWithoutTag(t *testing.T) { runWithDindSwarmAndRegistry(t, func(info dindSwarmAndRegistryInfo) { cmd := info.configuredCmd testDir := path.Join("testdata", "build") cmd.Command = dockerCli.Command("app", "build", "-f", path.Join(testDir, "single.dockerapp"), testDir) icmd.RunCmd(cmd).Assert(t, icmd.Success) cfg := getDockerConfigDir(t, cmd) f := path.Join(cfg, "app", "bundles", "contents", "sha256") infos, err := ioutil.ReadDir(f) assert.NilError(t, err) assert.Equal(t, len(infos), 1) id := infos[0].Name() f = path.Join(cfg, "app", "bundles", "contents", "sha256", id, image.BundleFilename) data, err := ioutil.ReadFile(f) assert.NilError(t, err) var bndl bundle.Bundle err = json.Unmarshal(data, &bndl) assert.NilError(t, err) built := []string{bndl.InvocationImages[0].Digest, bndl.Images["web"].Digest, bndl.Images["worker"].Digest} for _, ref := range built { cmd.Command = dockerCli.Command("inspect", ref) icmd.RunCmd(cmd).Assert(t, icmd.Success) } }) }
explode_data.jsonl/72003
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 438 }
[ 2830, 3393, 11066, 26040, 5668, 1155, 353, 8840, 836, 8, 341, 56742, 2354, 35, 484, 13218, 2178, 3036, 15603, 1155, 11, 2915, 14208, 294, 484, 13218, 2178, 3036, 15603, 1731, 8, 341, 197, 25920, 1669, 3546, 5423, 3073, 15613, 271, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestThingSave(t *testing.T) { thingCache := redis.NewThingCache(redisClient) key, err := uuid.New().ID() require.Nil(t, err, fmt.Sprintf("got unexpected error: %s", err)) id := "123" id2 := "124" err = thingCache.Save(context.Background(), key, id2) require.Nil(t, err, fmt.Sprintf("Save thing to cache: expected nil got %s", err)) cases := []struct { desc string ID string key string err error }{ { desc: "Save thing to cache", ID: id, key: key, err: nil, }, { desc: "Save already cached thing to cache", ID: id2, key: key, err: nil, }, } for _, tc := range cases { err := thingCache.Save(context.Background(), tc.key, tc.ID) assert.Nil(t, err, fmt.Sprintf("%s: expected %s got %s", tc.desc, tc.err, err)) } }
explode_data.jsonl/44672
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 340 }
[ 2830, 3393, 52940, 8784, 1155, 353, 8840, 836, 8, 341, 197, 1596, 8233, 1669, 20870, 7121, 52940, 8233, 97676, 2959, 340, 23634, 11, 1848, 1669, 16040, 7121, 1005, 915, 741, 17957, 59678, 1155, 11, 1848, 11, 8879, 17305, 445, 22390, 165...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestOptPort(t *testing.T) { assert := assert.New(t) var app App assert.Nil(OptPort(9999)(&app)) assert.Equal(":9999", app.Config.BindAddr) assert.Equal(9999, app.Config.Port) }
explode_data.jsonl/7707
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 77 }
[ 2830, 3393, 21367, 7084, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 692, 2405, 906, 1845, 198, 6948, 59678, 19238, 417, 7084, 7, 24, 24, 24, 24, 8, 2099, 676, 1171, 6948, 12808, 18893, 24, 24, 24, 24, 497, 906, 10...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestAdapterReturn(t *testing.T) { a := assert.New(t) ctx := context.Background() response := &mavsdk_rpc_action.ReturnToLaunchResponse{ ActionResult: &mavsdk_rpc_action.ActionResult{ Result: mavsdk_rpc_action.ActionResult_SUCCESS, }, } actionMock := &actionServiceClientMock{} actionMock.On("ReturnToLaunch", mock.Anything, mock.Anything).Return(response, nil) ret := AdapterReturnInternal(ctx, actionMock) a.Nil(ret) }
explode_data.jsonl/76178
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 165 }
[ 2830, 3393, 5940, 5598, 1155, 353, 8840, 836, 8, 341, 11323, 1669, 2060, 7121, 1155, 692, 20985, 1669, 2266, 19047, 2822, 21735, 1669, 609, 76, 38751, 7584, 60799, 7931, 46350, 1249, 32067, 2582, 515, 197, 197, 17301, 25, 609, 76, 38751...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDatabaseClient(t *testing.T) { timestamp, err := time.Parse(time.RFC3339, "2021-05-24T05:06:34.827Z") if err != nil { t.Fatal(err) } t.Run("Get", func(t *testing.T) { tests := []struct { name string filePath string statusCode int id notionapi.DatabaseID want *notionapi.Database wantErr bool err error }{ { name: "returns database by id", id: "some_id", filePath: "testdata/database_get.json", statusCode: http.StatusOK, want: &notionapi.Database{ Object: notionapi.ObjectTypeDatabase, ID: "some_id", CreatedTime: timestamp, LastEditedTime: timestamp, Title: []notionapi.RichText{ { Type: notionapi.ObjectTypeText, Text: notionapi.Text{Content: "Test Database", Link: ""}, Annotations: &notionapi.Annotations{Color: "default"}, PlainText: "Test Database", Href: "", }, }, // Properties: notionapi.Properties{ // "Tags": notionapi.MultiSelectProperty{ // ID: ";s|V", // Type: notionapi.PropertyTypeMultiSelect, // MultiSelect: notionapi.Select{Options: []notionapi.Option{{ID: "id", Name: "tag", Color: "Blue"}}}, // }, // "Some another column": notionapi.PeopleProperty{ // ID: "rJt\\", // Type: notionapi.PropertyTypePeople, // }, // "SomeColumn": notionapi.RichTextProperty{ // ID: "~j_@", // Type: notionapi.PropertyTypeRichText, // RichText: notionapi.RichText{}, // }, // "Name": notionapi.TitleProperty{ // ID: "title", // Type: notionapi.PropertyTypeTitle, // Title: notionapi.RichText{}, // }, // }, }, wantErr: false, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { c := newMockedClient(t, tt.filePath, tt.statusCode) client := notionapi.NewClient("some_token", notionapi.WithHTTPClient(c)) got, err := client.Database.Get(context.Background(), tt.id) if (err != nil) != tt.wantErr { t.Errorf("Get() error = %v, wantErr %v", err, tt.wantErr) return } // TODO: remove properties from comparing for a while. 
Have to compare with interface somehow got.Properties = nil if !reflect.DeepEqual(got, tt.want) { t.Errorf("Get() got = %v, want %v", got, tt.want) } }) } }) t.Run("List", func(t *testing.T) { tests := []struct { name string filePath string statusCode int want *notionapi.DatabaseListResponse wantErr bool err error }{ { name: "returns list of databases", filePath: "testdata/database_list.json", statusCode: http.StatusOK, want: &notionapi.DatabaseListResponse{ Object: notionapi.ObjectTypeList, Results: []notionapi.Database{ { Object: notionapi.ObjectTypeDatabase, ID: "some_id", CreatedTime: timestamp, LastEditedTime: timestamp, Title: notionapi.Paragraph{ { Type: notionapi.ObjectTypeText, Text: notionapi.Text{ Content: "Test Database", }, Annotations: &notionapi.Annotations{ Color: notionapi.ColorDefault, }, PlainText: "Test Database", }, }, }, }, HasMore: false, }, wantErr: false, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { c := newMockedClient(t, tt.filePath, tt.statusCode) client := notionapi.NewClient("some_token", notionapi.WithHTTPClient(c)) got, err := client.Database.List(context.Background(), nil) if (err != nil) != tt.wantErr { t.Errorf("List() error = %v, wantErr %v", err, tt.wantErr) return } got.Results[0].Properties = nil if !reflect.DeepEqual(got, tt.want) { t.Errorf("List() got = %v, want %v", got, tt.want) } }) } }) t.Run("Query", func(t *testing.T) { tests := []struct { name string filePath string statusCode int id notionapi.DatabaseID request *notionapi.DatabaseQueryRequest want *notionapi.DatabaseQueryResponse wantErr bool err error }{ { name: "returns query results", id: "some_id", filePath: "testdata/database_query.json", statusCode: http.StatusOK, request: &notionapi.DatabaseQueryRequest{ Filter: &notionapi.PropertyFilter{ Property: "Name", Text: map[notionapi.Condition]string{ notionapi.ConditionContains: "Hel", }, }, }, want: &notionapi.DatabaseQueryResponse{ Object: notionapi.ObjectTypeList, Results: []notionapi.Page{ 
{ Object: notionapi.ObjectTypePage, ID: "some_id", CreatedTime: timestamp, LastEditedTime: timestamp, Parent: notionapi.Parent{ Type: notionapi.ParentTypeDatabaseID, DatabaseID: "some_id", }, Archived: false, }, }, HasMore: false, NextCursor: "", }, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { c := newMockedClient(t, tt.filePath, tt.statusCode) client := notionapi.NewClient("some_token", notionapi.WithHTTPClient(c)) got, err := client.Database.Query(context.Background(), tt.id, tt.request) if (err != nil) != tt.wantErr { t.Errorf("Query() error = %v, wantErr %v", err, tt.wantErr) return } got.Results[0].Properties = nil if !reflect.DeepEqual(got, tt.want) { t.Errorf("Query() got = %v, want %v", got, tt.want) } }) } }) }
explode_data.jsonl/51706
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2845 }
[ 2830, 3393, 5988, 2959, 1155, 353, 8840, 836, 8, 341, 3244, 4702, 11, 1848, 1669, 882, 8937, 9730, 2013, 6754, 18, 18, 18, 24, 11, 330, 17, 15, 17, 16, 12, 15, 20, 12, 17, 19, 51, 15, 20, 25, 15, 21, 25, 18, 19, 13, 23, 17...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestDB_Update_ManualCommit(t *testing.T) { db := MustOpenDB() defer db.MustClose() var panicked bool if err := db.Update(func(tx *bolt.Tx) error { func() { defer func() { if r := recover(); r != nil { panicked = true } }() if err := tx.Commit(); err != nil { t.Fatal(err) } }() return nil }); err != nil { t.Fatal(err) } else if !panicked { t.Fatal("expected panic") } }
explode_data.jsonl/27480
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 195 }
[ 2830, 3393, 3506, 47393, 71628, 928, 33441, 1155, 353, 8840, 836, 8, 341, 20939, 1669, 15465, 5002, 3506, 741, 16867, 2927, 50463, 7925, 2822, 2405, 93010, 1807, 198, 743, 1848, 1669, 2927, 16689, 18552, 27301, 353, 52433, 81362, 8, 1465,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestVault_StoreAndRetrieve(t *testing.T) { testVaultItems := []vault.VaultItem{ { ItemType: vault.PrivateKeyWithMnemonic, Value: "SomePrivateKey", }, } storeVaultMock := func(w http.ResponseWriter, r *http.Request) { _, _ = w.Write([]byte(`{}`)) } serverMock := func() *httptest.Server { handler := http.NewServeMux() handler.HandleFunc("/vaults", storeVaultMock) srv := httptest.NewServer(handler) return srv } server := serverMock() defer server.Close() v := vault.New( // "https://f4nmmmkstb.execute-api.us-west-2.amazonaws.com/dev", // UNCOMMENT TO TEST REAL SERVER server.URL, testSaltSecret, ) storeRequest, err := v.Store(testUuid, testPassphrase, testAPIToken, testVaultItems) assert.Nil(t, err) if err != nil { return } assert.NotNil(t, storeRequest) assert.NotNil(t, storeRequest.Vault) assert.NotEqual(t, "", storeRequest.Vault) assert.NotNil(t, storeRequest.Vsk) assert.NotEqual(t, "", storeRequest.Vsk) retrieveVaultMock := func(w http.ResponseWriter, r *http.Request) { _, _ = w.Write([]byte(`{"encryptedVault": "` + storeRequest.Vault + `"}`)) } serverMock2 := func() *httptest.Server { handler := http.NewServeMux() handler.HandleFunc("/vaults/"+testUuid, retrieveVaultMock) srv := httptest.NewServer(handler) return srv } server2 := serverMock2() defer server2.Close() v2 := vault.New( // "https://f4nmmmkstb.execute-api.us-west-2.amazonaws.com/dev", // UNCOMMENT TO TEST REAL SERVER server2.URL, testSaltSecret, ) retrievedItems, err := v2.Retrieve(testUuid, testPassphrase) assert.Nil(t, err) if err != nil { return } assert.NotNil(t, retrievedItems) // Assert response matches what we initially vaulted assert.Equal(t, testVaultItems[0].ItemType, retrievedItems[0].ItemType) assert.Equal(t, testVaultItems[0].Value, retrievedItems[0].Value) }
explode_data.jsonl/11925
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 756 }
[ 2830, 3393, 79177, 92684, 3036, 87665, 1155, 353, 8840, 836, 8, 341, 18185, 79177, 4353, 1669, 3056, 82983, 5058, 945, 1234, 515, 197, 197, 515, 298, 61574, 929, 25, 34584, 87738, 1592, 2354, 44, 70775, 345, 298, 47399, 25, 262, 330, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestRunAfterSuccessCanRun covers the run-after-success gating logic: a
// child job may run unless it declares change requirements that none of the
// parent PR's changed files satisfy.
func TestRunAfterSuccessCanRun(t *testing.T) {
	tests := []struct {
		name   string
		parent *kube.ProwJob
		child  *kube.ProwJob
		// changes is what the fake GitHub client reports for the parent's PR.
		changes []github.PullRequestChange
		// err, when set, is returned by the fake GitHub client.
		err      error
		expected bool
	}{
		{
			// A child with no change requirements always runs.
			name: "child does not require specific changes",
			parent: &kube.ProwJob{
				Spec: kube.ProwJobSpec{
					Job:  "test-e2e",
					Type: kube.PresubmitJob,
					Refs: &kube.Refs{
						Org:  "kubernetes",
						Repo: "kubernetes",
						Pulls: []kube.Pull{
							{Number: 123},
						},
					},
				},
			},
			child: &kube.ProwJob{
				Spec: kube.ProwJobSpec{
					Job: "push-image",
				},
			},
			expected: true,
		},
		{
			// The PR touches a file the child cares about, so it runs.
			name: "child requires specific changes that are done",
			parent: &kube.ProwJob{
				Spec: kube.ProwJobSpec{
					Job:  "test-bazel-build",
					Type: kube.PresubmitJob,
					Refs: &kube.Refs{
						Org:  "kubernetes",
						Repo: "kubernetes",
						Pulls: []kube.Pull{
							{Number: 123},
						},
					},
				},
			},
			child: &kube.ProwJob{
				Spec: kube.ProwJobSpec{
					Job: "test-kubeadm-cloud",
				},
			},
			changes: []github.PullRequestChange{
				{Filename: "cmd/kubeadm/kubeadm.go"},
				{Filename: "vendor/BUILD"},
				{Filename: ".gitatrributes"},
			},
			expected: true,
		},
		{
			// None of the changed files match the child's requirements.
			name: "child requires specific changes that are not done",
			parent: &kube.ProwJob{
				Spec: kube.ProwJobSpec{
					Job:  "test-bazel-build",
					Type: kube.PresubmitJob,
					Refs: &kube.Refs{
						Org:  "kubernetes",
						Repo: "kubernetes",
						Pulls: []kube.Pull{
							{Number: 123},
						},
					},
				},
			},
			child: &kube.ProwJob{
				Spec: kube.ProwJobSpec{
					Job: "test-kubeadm-cloud",
				},
			},
			changes: []github.PullRequestChange{
				{Filename: "vendor/BUILD"},
				{Filename: ".gitatrributes"},
			},
			expected: false,
		},
	}
	for _, test := range tests {
		t.Logf("scenario %q", test.name)
		fakeGH := &fghc{
			changes: test.changes,
			err:     test.err,
		}
		c := Controller{
			log: logrus.NewEntry(logrus.StandardLogger()),
			cfg: newFakeConfigAgent(t, 0, nil).Config,
			ghc: fakeGH,
		}
		got := c.RunAfterSuccessCanRun(test.parent, test.child)
		if got != test.expected {
			t.Errorf("expected to run: %t, got: %t", test.expected, got)
		}
	}
}
explode_data.jsonl/72990
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1146 }
[ 2830, 3393, 6727, 6025, 7188, 6713, 6727, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 271, 197, 24804, 353, 97717, 1069, 651, 12245, 198, 197, 58391, 220, 353, 97717, 1069, 651, 12245, 271, 197, 197, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestVppAddArpTerminationTableEntry verifies that AddArpTerminationTableEntry
// sends the expected BdIPMacAddDel request (bridge domain 4, broadcast MAC,
// IPv4 address, IsAdd=1) over the mocked VPP channel.
func TestVppAddArpTerminationTableEntry(t *testing.T) {
	ctx, bdHandler, _ := bdTestSetup(t)
	defer ctx.TeardownTestCtx()
	// Queue a success reply for the request issued below.
	ctx.MockVpp.MockReply(&vpp_l2.BdIPMacAddDelReply{})
	err := bdHandler.AddArpTerminationTableEntry(
		4, "FF:FF:FF:FF:FF:FF", "192.168.4.4")
	Expect(err).ShouldNot(HaveOccurred())
	// The message captured on the mock channel must match field-for-field.
	Expect(ctx.MockChannel.Msg).To(Equal(&vpp_l2.BdIPMacAddDel{
		Entry: vpp_l2.BdIPMac{
			BdID: 4,
			IP: vpp_l2.Address{
				Af: vpp_l2.ADDRESS_IP4,
				Un: vpp_l2.AddressUnionIP4(
					vpp_l2.IP4Address{192, 168, 4, 4},
				),
			},
			Mac: vpp_l2.MacAddress{0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF},
		},
		IsAdd: 1,
	}))
}
explode_data.jsonl/12045
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 336 }
[ 2830, 3393, 53, 602, 2212, 6953, 79, 21209, 32096, 2556, 5874, 1155, 353, 8840, 836, 8, 341, 20985, 11, 21486, 3050, 11, 716, 1669, 21486, 2271, 21821, 1155, 340, 16867, 5635, 94849, 37496, 2271, 23684, 2822, 20985, 24664, 53, 602, 2466...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestList(t *testing.T) { l := MapItem{} l.Empty() m := NewMap(&l) if 0 != len(m.Items()) { t.Error() } for _, e := range list { m.Set(e.k, &MapItem{Value: e}, e.f) } if len(list) != len(m.Items()) { t.Error() } newlist := make([]int, len(list)) i := 0 l.Iterate(func(list, item *MapItem) bool { newlist[i] = item.Value.(testItem).v i++ return true }) j := 0 for _, e := range list { if e.f { if newlist[j] != e.v { t.Error() } j++ } } if j != i { t.Error() } }
explode_data.jsonl/74884
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 268 }
[ 2830, 3393, 852, 1155, 353, 8840, 836, 8, 341, 8810, 1669, 5027, 1234, 16094, 8810, 11180, 2822, 2109, 1669, 1532, 2227, 2099, 75, 340, 743, 220, 15, 961, 2422, 1255, 12054, 2140, 341, 197, 3244, 6141, 741, 197, 630, 2023, 8358, 384, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestFleetWithLongLabelsAnnotations exercises API validation of fleet
// template metadata: label values and annotation keys one character over the
// Kubernetes limit must be rejected on both Create and Update (with the
// offending field named in the status error), while values exactly at the
// limit are accepted.
func TestFleetWithLongLabelsAnnotations(t *testing.T) {
	t.Parallel()
	ctx := context.Background()
	client := framework.AgonesClient.AgonesV1()
	fleetSize := int32(1)
	flt := defaultFleet(framework.Namespace)
	flt.Spec.Replicas = fleetSize
	// A value exactly at the max length is legal; one character more is not.
	normalLengthName := strings.Repeat("f", validation.LabelValueMaxLength)
	longName := normalLengthName + "f"
	flt.Spec.Template.ObjectMeta.Labels = make(map[string]string)
	flt.Spec.Template.ObjectMeta.Labels["label"] = longName
	_, err := client.Fleets(framework.Namespace).Create(ctx, flt, metav1.CreateOptions{})
	assert.Error(t, err)
	statusErr, ok := err.(*k8serrors.StatusError)
	assert.True(t, ok)
	// Exactly one cause, identifying the over-long label value.
	assert.Len(t, statusErr.Status().Details.Causes, 1)
	assert.Equal(t, metav1.CauseTypeFieldValueInvalid, statusErr.Status().Details.Causes[0].Type)
	assert.Equal(t, "labels", statusErr.Status().Details.Causes[0].Field)
	// Set Label to normal size and add Annotations with an error
	flt.Spec.Template.ObjectMeta.Labels["label"] = normalLengthName
	flt.Spec.Template.ObjectMeta.Annotations = make(map[string]string)
	flt.Spec.Template.ObjectMeta.Annotations[longName] = normalLengthName
	_, err = client.Fleets(framework.Namespace).Create(ctx, flt, metav1.CreateOptions{})
	assert.Error(t, err)
	statusErr, ok = err.(*k8serrors.StatusError)
	assert.True(t, ok)
	assert.Len(t, statusErr.Status().Details.Causes, 1)
	assert.Equal(t, "annotations", statusErr.Status().Details.Causes[0].Field)
	assert.Equal(t, metav1.CauseTypeFieldValueInvalid, statusErr.Status().Details.Causes[0].Type)
	// A fleet whose label value sits exactly at the limit must be accepted.
	goodFlt := defaultFleet(framework.Namespace)
	goodFlt.Spec.Template.ObjectMeta.Labels = make(map[string]string)
	goodFlt.Spec.Template.ObjectMeta.Labels["label"] = normalLengthName
	goodFlt, err = client.Fleets(framework.Namespace).Create(ctx, goodFlt, metav1.CreateOptions{})
	if assert.Nil(t, err) {
		defer client.Fleets(framework.Namespace).Delete(ctx, goodFlt.ObjectMeta.Name, metav1.DeleteOptions{}) // nolint:errcheck
	}
	err = framework.WaitForFleetCondition(t, goodFlt,
		e2e.FleetReadyCount(goodFlt.Spec.Replicas))
	assert.Nil(t, err)
	// Verify validation on Update()
	flt, err = client.Fleets(framework.Namespace).Get(ctx, goodFlt.ObjectMeta.GetName(), metav1.GetOptions{})
	assert.Nil(t, err)
	goodFlt = flt.DeepCopy()
	goodFlt.Spec.Template.ObjectMeta.Annotations = make(map[string]string)
	goodFlt.Spec.Template.ObjectMeta.Annotations[longName] = normalLengthName
	_, err = client.Fleets(framework.Namespace).Update(ctx, goodFlt, metav1.UpdateOptions{})
	assert.Error(t, err)
	statusErr, ok = err.(*k8serrors.StatusError)
	assert.True(t, ok)
	assert.Len(t, statusErr.Status().Details.Causes, 1)
	assert.Equal(t, "annotations", statusErr.Status().Details.Causes[0].Field)
	assert.Equal(t, metav1.CauseTypeFieldValueInvalid, statusErr.Status().Details.Causes[0].Type)
	// Make sure normal annotations path Validation on Update
	flt, err = client.Fleets(framework.Namespace).Get(ctx, goodFlt.ObjectMeta.GetName(), metav1.GetOptions{})
	assert.Nil(t, err)
	goodFlt = flt.DeepCopy()
	goodFlt.Spec.Template.ObjectMeta.Annotations = make(map[string]string)
	goodFlt.Spec.Template.ObjectMeta.Annotations[normalLengthName] = longName
	_, err = client.Fleets(framework.Namespace).Update(ctx, goodFlt, metav1.UpdateOptions{})
	assert.Nil(t, err)
}
explode_data.jsonl/15431
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1182 }
[ 2830, 3393, 37, 18973, 2354, 6583, 23674, 21418, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 20985, 1669, 2266, 19047, 2822, 25291, 1669, 12626, 49850, 3154, 2959, 49850, 3154, 53, 16, 741, 1166, 18973, 1695, 1669, 526, 18, 17...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSuperHashSetGet(t *testing.T) { hashmap := New() k1, k2, k3, value := 1, true, 3, 4 hashmap.Set(k1, k2, k3, value) if hashmap.Get(k1, k2, k3) != value { t.Error("invalid value for keys") } }
explode_data.jsonl/82207
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 97 }
[ 2830, 3393, 19284, 44601, 1949, 1155, 353, 8840, 836, 8, 341, 50333, 2186, 1669, 1532, 741, 16463, 16, 11, 595, 17, 11, 595, 18, 11, 897, 1669, 220, 16, 11, 830, 11, 220, 18, 11, 220, 19, 198, 50333, 2186, 4202, 5969, 16, 11, 59...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestMarshalAppendWithContext(t *testing.T) { for _, tc := range marshalingTestCases { t.Run(tc.name, func(t *testing.T) { dst := make([]byte, 0, 1024) var reg *bsoncodec.Registry if tc.reg != nil { reg = tc.reg } else { reg = DefaultRegistry } ec := bsoncodec.EncodeContext{Registry: reg} got, err := MarshalAppendWithContext(ec, dst, tc.val) noerr(t, err) if !bytes.Equal(got, tc.want) { t.Errorf("Bytes are not equal. got %v; want %v", got, tc.want) t.Errorf("Bytes:\n%v\n%v", got, tc.want) } }) } }
explode_data.jsonl/12824
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 263 }
[ 2830, 3393, 55438, 23877, 91101, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17130, 1669, 2088, 31996, 6132, 2271, 37302, 341, 197, 3244, 16708, 44415, 2644, 11, 2915, 1155, 353, 8840, 836, 8, 341, 298, 52051, 1669, 1281, 10556, 3782, 11,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestAsIs(t *testing.T) { eRR := errors.New("foobar") ee := Wrap(eRR) eee := Wrap(ee) eeee := Wrap(eee) e := Wrap(eeee) // non-wrapped pointer with as should fail if As(eRR, ErrMadNetType()) { t.Fatal("") } // pointer with as should not fail for type if !As(e, ErrMadNetType()) { t.Fatal("") } // pointer with as should not fail for wrapped type if !As(e, &eRR) { t.Fatal("") } // pointers with is should fail if Is(eRR, *ErrMadNetType()) { t.Fatal("") } // pointers with is should fail if Is(e, *ErrMadNetType()) { t.Fatal("") } // vars with is should not fail for correct var if !Is(e, eRR) { t.Fatal("") } }
explode_data.jsonl/54725
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 281 }
[ 2830, 3393, 2121, 3872, 1155, 353, 8840, 836, 8, 341, 7727, 8106, 1669, 5975, 7121, 445, 50267, 1138, 197, 2127, 1669, 42187, 2026, 8106, 340, 197, 34063, 1669, 42187, 2026, 68, 340, 197, 68616, 1669, 42187, 2026, 2127, 340, 7727, 1669,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
// TestClientSpanDropped verifies span-limit behavior: with SetMaxSpans(1) the
// first outgoing HTTP request gets a real span (its span ID appears in the
// propagated traceparent header), while the second request's span is dropped
// and the transaction's IDs are propagated instead.
func TestClientSpanDropped(t *testing.T) {
	// The server echoes the incoming distributed-tracing header back, so the
	// client can observe exactly what was propagated.
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
		w.Write([]byte(req.Header.Get("Elastic-Apm-Traceparent")))
	}))
	defer server.Close()
	tracer, transport := transporttest.NewRecorderTracer()
	defer tracer.Close()
	tracer.SetMaxSpans(1) // keep only the first span per transaction
	tx := tracer.StartTransaction("name", "type")
	ctx := apm.ContextWithTransaction(context.Background(), tx)
	var responseBodies []string
	for i := 0; i < 2; i++ {
		_, responseBody := mustGET(ctx, server.URL)
		responseBodies = append(responseBodies, string(responseBody))
	}
	tx.End()
	tracer.Flush(nil)
	payloads := transport.Payloads()
	// Only one span survived the limit.
	require.Len(t, payloads.Spans, 1)
	transaction := payloads.Transactions[0]
	span := payloads.Spans[0]
	// for first request
	clientTraceContext, err := apmhttp.ParseTraceparentHeader(string(responseBodies[0]))
	require.NoError(t, err)
	assert.Equal(t, span.TraceID, model.TraceID(clientTraceContext.Trace))
	assert.Equal(t, span.ID, model.SpanID(clientTraceContext.Span))
	// Second request: the span was dropped, so the transaction's IDs were
	// propagated in the header instead.
	clientTraceContext, err = apmhttp.ParseTraceparentHeader(string(responseBodies[1]))
	require.NoError(t, err)
	assert.Equal(t, transaction.TraceID, model.TraceID(clientTraceContext.Trace))
	assert.Equal(t, transaction.ID, model.SpanID(clientTraceContext.Span))
}
explode_data.jsonl/49616
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 483 }
[ 2830, 3393, 2959, 12485, 35, 41716, 1155, 353, 8840, 836, 8, 341, 41057, 1669, 54320, 70334, 7121, 5475, 19886, 89164, 18552, 3622, 1758, 37508, 11, 4232, 353, 1254, 9659, 8, 341, 197, 6692, 4073, 10556, 3782, 6881, 15753, 2234, 445, 36...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBadReplicaValues(t *testing.T) { g := gomega.NewGomegaWithT(t) kfsvc := TFExampleKFService.DeepCopy() kfsvc.Spec.MinReplicas = -1 g.Expect(kfsvc.ValidateCreate()).Should(gomega.MatchError("MinReplicas cannot be less than 0")) kfsvc.Spec.MinReplicas = 1 kfsvc.Spec.MaxReplicas = -1 g.Expect(kfsvc.ValidateCreate()).Should(gomega.MatchError("MaxReplicas cannot be less than 0")) kfsvc.Spec.MinReplicas = 2 kfsvc.Spec.MaxReplicas = 1 g.Expect(kfsvc.ValidateCreate()).Should(gomega.MatchError("MinReplicas cannot be greater than MaxReplicas")) }
explode_data.jsonl/50836
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 220 }
[ 2830, 3393, 17082, 18327, 15317, 6227, 1155, 353, 8840, 836, 8, 341, 3174, 1669, 342, 32696, 7121, 38, 32696, 2354, 51, 1155, 340, 16463, 69, 58094, 1669, 29145, 13314, 65008, 1860, 55602, 12106, 741, 16463, 69, 58094, 36473, 17070, 18327...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCertStoreMismatchedIdentity(t *testing.T) { mismatchedIdentity := func(nonce uint64) proto.ReceivedMessage { return createUpdateMessage(nonce, createMismatchedUpdateMessage()) } pm, cs, _ := createObjects(mismatchedIdentity, nil) defer pm.Stop() defer cs.stop() testCertificateUpdate(t, false, cs) }
explode_data.jsonl/52300
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 110 }
[ 2830, 3393, 36934, 6093, 82572, 291, 18558, 1155, 353, 8840, 836, 8, 341, 2109, 24976, 291, 18558, 1669, 2915, 1445, 13184, 2622, 21, 19, 8, 18433, 2817, 8771, 2052, 341, 197, 853, 1855, 4289, 2052, 1445, 13184, 11, 1855, 82572, 291, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNew(t *testing.T) { ctx, _ := SetupFakeContext(t) c := NewController(ctx, configmap.NewStaticWatcher()) if c == nil { t.Fatal("Expected NewController to return a non-nil value") } }
explode_data.jsonl/10605
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 72 }
[ 2830, 3393, 3564, 1155, 353, 8840, 836, 8, 341, 20985, 11, 716, 1669, 18626, 52317, 1972, 1155, 692, 1444, 1669, 1532, 2051, 7502, 11, 2193, 2186, 7121, 11690, 47248, 12367, 743, 272, 621, 2092, 341, 197, 3244, 26133, 445, 18896, 1532, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
// TestExternalIPs verifies that a ClusterIP service carrying external IPs is
// rendered into IPVS virtual servers for the IPv4 addresses, and that each
// external-IP virtual server points at the service's endpoint.
func TestExternalIPs(t *testing.T) {
	ipt := iptablestest.NewFake()
	ipvs := ipvstest.NewFake()
	ipset := ipsettest.NewFake(testIPSetVersion)
	fp := NewFakeProxier(ipt, ipvs, ipset, nil, nil, v1.IPv4Protocol)
	svcIP := "10.20.30.41"
	svcPort := 80
	// Mix of IPv4 and IPv6 external IPs; the proxier is IPv4-only here.
	svcExternalIPs := sets.NewString("50.60.70.81", "2012::51", "127.0.0.1")
	svcPortName := proxy.ServicePortName{
		NamespacedName: makeNSN("ns1", "svc1"),
		Port:           "p80",
	}
	makeServiceMap(fp,
		makeTestService(svcPortName.Namespace, svcPortName.Name, func(svc *v1.Service) {
			svc.Spec.Type = "ClusterIP"
			svc.Spec.ClusterIP = svcIP
			svc.Spec.ExternalIPs = svcExternalIPs.UnsortedList()
			svc.Spec.Ports = []v1.ServicePort{{
				Name:       svcPortName.Port,
				Port:       int32(svcPort),
				Protocol:   v1.ProtocolTCP,
				TargetPort: intstr.FromInt(svcPort),
			}}
		}),
	)
	epIP := "10.180.0.1"
	udpProtocol := v1.ProtocolUDP
	populateEndpointSlices(fp,
		makeTestEndpointSlice(svcPortName.Namespace, svcPortName.Name, 1, func(eps *discovery.EndpointSlice) {
			eps.AddressType = discovery.AddressTypeIPv4
			eps.Endpoints = []discovery.Endpoint{{
				Addresses: []string{epIP},
			}}
			eps.Ports = []discovery.EndpointPort{{
				Name:     utilpointer.StringPtr(svcPortName.Port),
				Port:     utilpointer.Int32(int32(svcPort)),
				Protocol: &udpProtocol,
			}}
		}),
	)
	fp.syncProxyRules()
	// check ipvs service and destinations
	services, err := ipvs.GetVirtualServers()
	if err != nil {
		t.Errorf("Failed to get ipvs services, err: %v", err)
	}
	if len(services) != 3 { // ipvs filters out by ipfamily
		t.Errorf("Expect 3 ipvs services, got %d", len(services))
	}
	found := false
	for _, svc := range services {
		// Match the virtual server created for one of the external IPs.
		if svcExternalIPs.Has(svc.Address.String()) && svc.Port == uint16(svcPort) && svc.Protocol == string(v1.ProtocolTCP) {
			found = true
			// All real servers behind it must be the single endpoint.
			destinations, _ := ipvs.GetRealServers(svc)
			for _, dest := range destinations {
				if dest.Address.String() != epIP || dest.Port != uint16(svcPort) {
					t.Errorf("service Endpoint mismatch ipvs service destination")
				}
			}
			break
		}
	}
	if !found {
		t.Errorf("Expect external ip type service, got none")
	}
}
explode_data.jsonl/44355
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 941 }
[ 2830, 3393, 25913, 3298, 82, 1155, 353, 8840, 836, 8, 341, 8230, 417, 1669, 66068, 480, 267, 477, 7121, 52317, 741, 46531, 11562, 1669, 45475, 267, 477, 7121, 52317, 741, 46531, 746, 1669, 5997, 746, 1944, 7121, 52317, 8623, 3298, 1649,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNewWriterSizeIdempotent(t *testing.T) { const BufSize = 1000 b := NewWriterSize(new(bytes.Buffer), BufSize) // Does it recognize itself? b1 := NewWriterSize(b, BufSize) if b1 != b { t.Error("NewWriterSize did not detect underlying Writer") } // Does it wrap if existing buffer is too small? b2 := NewWriterSize(b, 2*BufSize) if b2 == b { t.Error("NewWriterSize did not enlarge buffer") } }
explode_data.jsonl/2880
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 151 }
[ 2830, 3393, 3564, 6492, 1695, 764, 3262, 63532, 1155, 353, 8840, 836, 8, 341, 4777, 69013, 1695, 284, 220, 16, 15, 15, 15, 198, 2233, 1669, 1532, 6492, 1695, 1755, 23158, 22622, 701, 69013, 1695, 340, 197, 322, 12553, 432, 15282, 5086...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestStateChanges walks the state through set/overwrite/delete transaction
// cycles, checking the in-memory view, the persisted view, and the state
// delta captured at each persistence point.
func TestStateChanges(t *testing.T) {
	stateTestWrapper, state := createFreshDBAndConstructState(t)
	// add keys
	state.TxBegin("txUuid")
	state.Set("chaincode1", "key1", []byte("value1"))
	state.Set("chaincode1", "key2", []byte("value2"))
	state.TxFinish("txUuid", true)
	// check in-memory
	testutil.AssertEquals(t, stateTestWrapper.get("chaincode1", "key1", false), []byte("value1"))
	// The committed view (true) is still empty: nothing persisted yet.
	testutil.AssertNil(t, stateTestWrapper.get("chaincode1", "key1", true))
	delta := state.getStateDelta()
	// save to db
	stateTestWrapper.persistAndClearInMemoryChanges(0)
	testutil.AssertEquals(t, stateTestWrapper.get("chaincode1", "key1", true), []byte("value1"))
	// The delta stored for block 0 must match the one captured above.
	testutil.AssertEquals(t, stateTestWrapper.fetchStateDeltaFromDB(0), delta)
	// make changes when data is already in db
	state.TxBegin("txUuid")
	state.Set("chaincode1", "key1", []byte("new_value1"))
	state.TxFinish("txUuid", true)
	testutil.AssertEquals(t, stateTestWrapper.get("chaincode1", "key1", false), []byte("new_value1"))
	state.TxBegin("txUuid")
	state.Delete("chaincode1", "key2")
	state.TxFinish("txUuid", true)
	testutil.AssertNil(t, stateTestWrapper.get("chaincode1", "key2", false))
	state.TxBegin("txUuid")
	state.Set("chaincode2", "key3", []byte("value3"))
	state.Set("chaincode2", "key4", []byte("value4"))
	state.TxFinish("txUuid", true)
	delta = state.getStateDelta()
	stateTestWrapper.persistAndClearInMemoryChanges(1)
	// After the second persist: the stored delta matches, and the overwrite
	// and delete are durable in the committed view.
	testutil.AssertEquals(t, stateTestWrapper.fetchStateDeltaFromDB(1), delta)
	testutil.AssertEquals(t, stateTestWrapper.get("chaincode1", "key1", true), []byte("new_value1"))
	testutil.AssertNil(t, stateTestWrapper.get("chaincode1", "key2", true))
	testutil.AssertEquals(t, stateTestWrapper.get("chaincode2", "key3", true), []byte("value3"))
}
explode_data.jsonl/69014
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 653 }
[ 2830, 3393, 1397, 11317, 1155, 353, 8840, 836, 8, 341, 24291, 2271, 11542, 11, 1584, 1669, 1855, 55653, 3506, 3036, 28468, 1397, 1155, 340, 197, 322, 912, 6894, 198, 24291, 81362, 11135, 445, 3998, 38431, 1138, 24291, 4202, 445, 8819, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPGPPullOne(t *testing.T) { tc := SetupEngineTest(t, "pgp_pull") defer tc.Cleanup() sigVersion := libkb.GetDefaultSigVersion(tc.G) users := []string{"t_alice", "t_bob"} fu := createUserWhoTracks(tc, users, sigVersion) defer untrackUserList(tc, fu, users, sigVersion) gpgClient := createGpgClient(tc) assertKeysMissing(t, gpgClient, []string{aliceFp, bobFp}) runPGPPull(tc, PGPPullEngineArg{ // ID'ing the same user twice should be ok. UserAsserts: []string{"t_bob"}, }) assertKeysPresent(t, gpgClient, []string{bobFp}) assertKeysMissing(t, gpgClient, []string{aliceFp}) }
explode_data.jsonl/44546
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 239 }
[ 2830, 3393, 11383, 4406, 617, 3966, 1155, 353, 8840, 836, 8, 341, 78255, 1669, 18626, 4571, 2271, 1155, 11, 330, 3517, 79, 65693, 1138, 16867, 17130, 727, 60639, 741, 84841, 5637, 1669, 3051, 21310, 2234, 3675, 47246, 5637, 44415, 1224, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestBundlingFilesOutsideOfOutbase verifies that code splitting still works
// when the configured output base directory lies outside the output directory.
func TestBundlingFilesOutsideOfOutbase(t *testing.T) {
	splitting_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/src/entry.js": `
				console.log('test')
			`,
		},
		entryPaths: []string{"/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			CodeSplitting: true,
			OutputFormat:  config.FormatESModule,
			// The outbase is deliberately unrelated to AbsOutputDir.
			AbsOutputBase: "/some/nested/directory",
			AbsOutputDir:  "/out",
		},
	})
}
explode_data.jsonl/38604
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 188 }
[ 2830, 3393, 33, 1241, 2718, 10809, 41365, 2124, 2662, 3152, 1155, 353, 8840, 836, 8, 341, 1903, 2292, 1280, 57239, 25952, 33, 1241, 832, 1155, 11, 51450, 515, 197, 74075, 25, 2415, 14032, 30953, 515, 298, 197, 3115, 3548, 14, 4085, 28...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFormatterForegroundBrightColors(test *testing.T) { formatted, err := formatter.Format("{cyan | bright | foreground}cyan{normal}") assert.NoError(test, err) assert.Equal(test, "\x1b[96mcyan\x1b[0m", formatted) }
explode_data.jsonl/39766
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 84 }
[ 2830, 3393, 14183, 25982, 74676, 13108, 8623, 353, 8840, 836, 8, 341, 37410, 12127, 11, 1848, 1669, 24814, 9978, 13976, 69746, 760, 9906, 760, 39305, 92, 69746, 90, 8252, 55266, 6948, 35699, 8623, 11, 1848, 340, 6948, 12808, 8623, 11, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestValidateBirthYear(t *testing.T) { if !ValidateBirthYear("1920") { t.Error("1920") t.FailNow() } if !ValidateBirthYear("1930") { t.Error("1930") t.FailNow() } if !ValidateBirthYear("2002") { t.Error("2002") t.FailNow() } if ValidateBirthYear("2003") { t.Error("2003") t.FailNow() } if ValidateBirthYear("2020") { t.Error("2020") t.FailNow() } if ValidateBirthYear("1919") { t.Error("1919") t.FailNow() } }
explode_data.jsonl/24409
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 209 }
[ 2830, 3393, 17926, 31478, 9490, 1155, 353, 8840, 836, 8, 341, 743, 753, 17926, 31478, 9490, 445, 16, 24, 17, 15, 899, 341, 197, 3244, 6141, 445, 16, 24, 17, 15, 1138, 197, 3244, 57243, 7039, 741, 197, 532, 743, 753, 17926, 31478, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
// TestUpdate drives the local-serve controller through a spec change: after
// the resource's command flips from "true" to "false", the old command must
// be canceled silently — no error status action and no "canceled" log — and
// the new command must start.
func TestUpdate(t *testing.T) {
	f := newFixture(t)
	defer f.teardown()
	t1 := time.Unix(1, 0)
	f.resource("foo", "true", t1)
	f.step()
	f.assertStatus("foo", model.RuntimeStatusOK, 1)
	// Change the serve command; the controller should restart cleanly with a
	// bumped sequence number.
	t2 := time.Unix(2, 0)
	f.resource("foo", "false", t2)
	f.step()
	f.assertStatus("foo", model.RuntimeStatusOK, 2)
	// The cancellation of the old command must not surface as an error status...
	f.assertNoAction("error for cancel", func(action store.Action) bool {
		a, ok := action.(LocalServeStatusAction)
		if !ok {
			return false
		}
		return a.ManifestName == "foo" && a.Status == model.RuntimeStatusError
	})
	// ...nor as a "canceled" log line for the manifest.
	f.assertNoAction("log for cancel", func(action store.Action) bool {
		a, ok := action.(store.LogEvent)
		if !ok {
			return false
		}
		return a.ManifestName() == "foo" && strings.Contains(string(a.Message()), "cmd true canceled")
	})
	// The old process is gone and the new command has started.
	f.fe.RequireNoKnownProcess(t, "true")
	f.assertLogMessage("foo", "Starting cmd false")
}
explode_data.jsonl/53807
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 331 }
[ 2830, 3393, 4289, 1155, 353, 8840, 836, 8, 341, 1166, 1669, 501, 18930, 1155, 340, 16867, 282, 31853, 37496, 2822, 3244, 16, 1669, 882, 10616, 941, 7, 16, 11, 220, 15, 340, 1166, 24013, 445, 7975, 497, 330, 1866, 497, 259, 16, 340, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestCT_ColorTransformConstructor(t *testing.T) { v := diagram.NewCT_ColorTransform() if v == nil { t.Errorf("diagram.NewCT_ColorTransform must return a non-nil value") } if err := v.Validate(); err != nil { t.Errorf("newly constructed diagram.CT_ColorTransform should validate: %s", err) } }
explode_data.jsonl/63104
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 106 }
[ 2830, 3393, 1162, 43387, 8963, 13288, 1155, 353, 8840, 836, 8, 341, 5195, 1669, 13549, 7121, 1162, 43387, 8963, 741, 743, 348, 621, 2092, 341, 197, 3244, 13080, 445, 8579, 5745, 7121, 1162, 43387, 8963, 1969, 470, 264, 2477, 83248, 897,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestResolveIdentifyImplicitTeamWithDuplicates resolves an implicit team via
// assertion strings containing duplicate members — a plain duplicate, a
// duplicate that only collides after social-proof resolution, and a duplicate
// across the reader ("#") boundary — and expects every lookup to land on the
// same team with the same writer set.
func TestResolveIdentifyImplicitTeamWithDuplicates(t *testing.T) {
	tt := newTeamTester(t)
	defer tt.cleanup()
	alice := tt.addUser("abc")
	g := alice.tc.G
	bob := tt.addUser("bob")
	iTeamNameCreate := strings.Join([]string{alice.username, bob.username}, ",")
	// simple duplicate
	iTeamNameLookup1 := strings.Join([]string{alice.username, bob.username, bob.username}, ",")
	// duplicate after resolution
	iTeamNameLookup2 := strings.Join([]string{alice.username, bob.username, bob.username + "@rooter"}, ",")
	// duplicate across reader boundary
	iTeamNameLookup3 := strings.Join([]string{alice.username, bob.username + "@rooter"}, ",") + "#" + bob.username
	t.Logf("make an implicit team")
	iTeam, _, _, err := teams.LookupOrCreateImplicitTeam(context.TODO(), g, iTeamNameCreate, false /*isPublic*/)
	require.NoError(t, err)
	// Prove bob@rooter so the @rooter assertions resolve to bob.
	bob.proveRooter()
	cli, err := client.GetIdentifyClient(g)
	require.NoError(t, err, "failed to get new identifyclient")
	for i, lookup := range []string{iTeamNameLookup1, iTeamNameLookup2, iTeamNameLookup3} {
		t.Logf("checking %v: %v", i, lookup)
		res, err := cli.ResolveIdentifyImplicitTeam(context.Background(), keybase1.ResolveIdentifyImplicitTeamArg{
			Assertions:       lookup,
			Suffix:           "",
			IsPublic:         false,
			DoIdentifies:     false,
			Create:           false,
			IdentifyBehavior: keybase1.TLFIdentifyBehavior_DEFAULT_KBFS,
		})
		require.NoError(t, err, "%v %v", err, spew.Sdump(res))
		// Every duplicate-containing lookup must resolve to the created team.
		require.Equal(t, res.TeamID, iTeam.ID)
		require.Equal(t, res.DisplayName, iTeamNameCreate)
		require.True(t, compareUserVersionSets([]keybase1.UserVersion{alice.userVersion(), bob.userVersion()}, res.Writers))
		require.Nil(t, res.TrackBreaks, "track breaks")
	}
}
explode_data.jsonl/42661
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 675 }
[ 2830, 3393, 56808, 28301, 1437, 59558, 14597, 2354, 76851, 1155, 353, 8840, 836, 8, 341, 3244, 83, 1669, 501, 14597, 58699, 1155, 340, 16867, 17853, 87689, 2822, 197, 63195, 1669, 17853, 1364, 1474, 445, 13683, 1138, 3174, 1669, 70433, 78...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestFileSetModTime(t *testing.T) { r := fstest.NewRun(t) if !canSetModTime(t, r) { return } defer r.Finalise() vfs, file, file1 := fileCreate(t, r, vfscommon.CacheModeOff) err := file.SetModTime(t2) require.NoError(t, err) file1.ModTime = t2 fstest.CheckItems(t, r.Fremote, file1) vfs.Opt.ReadOnly = true err = file.SetModTime(t2) assert.Equal(t, EROFS, err) }
explode_data.jsonl/9732
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 176 }
[ 2830, 3393, 1703, 1649, 4459, 1462, 1155, 353, 8840, 836, 8, 341, 7000, 1669, 48434, 477, 7121, 6727, 1155, 340, 743, 753, 4814, 1649, 4459, 1462, 1155, 11, 435, 8, 341, 197, 853, 198, 197, 532, 16867, 435, 991, 977, 1064, 741, 5195...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestWrapperGetActiveOrders(t *testing.T) { t.Parallel() if !areTestAPIKeysSet() || !canManipulateRealOrders { t.Skip("skipping test: api keys not set or canManipulateRealOrders set to false") } p, err := currency.NewPairFromString("EOS-USDT") if err != nil { t.Error(err) } _, err = b.GetActiveOrders(context.Background(), &order.GetOrdersRequest{ Type: order.AnyType, Side: order.AnySide, Pairs: currency.Pairs{p}, AssetType: asset.CoinMarginedFutures, }) if err != nil { t.Error(err) } p2, err := currency.NewPairFromString("BTCUSDT") if err != nil { t.Error(err) } _, err = b.GetActiveOrders(context.Background(), &order.GetOrdersRequest{ Type: order.AnyType, Side: order.AnySide, Pairs: currency.Pairs{p2}, AssetType: asset.USDTMarginedFutures, }) if err != nil { t.Error(err) } }
explode_data.jsonl/76669
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 372 }
[ 2830, 3393, 11542, 1949, 5728, 24898, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 743, 753, 546, 2271, 7082, 8850, 1649, 368, 1369, 753, 4814, 92876, 6334, 12768, 24898, 341, 197, 3244, 57776, 445, 4886, 5654, 1273, 25, 6330, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestSlackAddToEPStep(t *testing.T) { t.Parallel() sql := ` insert into escalation_policies (id, name) values ({{uuid "eid"}}, 'esc policy'); insert into services (id, escalation_policy_id, name) values ({{uuid "sid"}}, {{uuid "eid"}}, 'service'); ` h := harness.NewHarness(t, sql, "slack-user-link") defer h.Close() doQL := func(t *testing.T, query string) { g := h.GraphQLQuery2(query) for _, err := range g.Errors { t.Error("GraphQL Error:", err.Message) } if len(g.Errors) > 0 { t.Fatal("errors returned from GraphQL") } t.Log("Response:", string(g.Data)) } channel := h.Slack().Channel("test") doQL(t, fmt.Sprintf(` mutation { createEscalationPolicyStep(input:{ escalationPolicyID: "%s", delayMinutes: 5, targets: [{ id: "%s", type: slackChannel, }], }){ id } } `, h.UUID("eid"), channel.ID())) h.CreateAlert(h.UUID("sid"), "testing") channel.ExpectMessage("testing") }
explode_data.jsonl/24833
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 434 }
[ 2830, 3393, 7442, 473, 2212, 1249, 9197, 8304, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 30633, 1669, 22074, 59847, 1119, 70289, 620, 42038, 320, 307, 11, 829, 8, 715, 45939, 198, 197, 197, 2306, 90, 17128, 330, 61878, 92...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestAddLogMultiWithExtKeyErr(t *testing.T) { mockdb := &mockMongo{ data: nil, err: errors.New("mock error, TestAddLogMultiWithExtKeyErr"), errTrigger: 1, errTriggerStep: 0, } DB = mockdb contents := []auditoplog.AuditLogExt{ auditoplog.AuditLogExt{ID: 1, Content: "row1", ExtKey: "127.0.0.1"}, } err := AddLogMultiWithExtKey(1, auditoplog.AuditOpTypeAdd, common.BKInnerObjIDHost, contents, "mock desc", common.BKDefaultOwnerID, "user") if err != mockdb.err { t.Error(err) } }
explode_data.jsonl/56520
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 240 }
[ 2830, 3393, 2212, 2201, 20358, 2354, 6756, 1592, 7747, 1155, 353, 8840, 836, 8, 341, 77333, 1999, 1669, 609, 16712, 54998, 515, 197, 8924, 25, 1843, 2092, 345, 197, 9859, 25, 310, 5975, 7121, 445, 16712, 1465, 11, 3393, 2212, 2201, 20...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestDelete(t *testing.T) { //Test should create the file before deleting it path := "k8s://namespace/pod/container/path/to/delete/from" if err := skbn.Delete(path, 1); err != nil { log.Fatal(err) } }
explode_data.jsonl/13930
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 79 }
[ 2830, 3393, 6435, 1155, 353, 8840, 836, 8, 341, 197, 322, 2271, 1265, 1855, 279, 1034, 1573, 33011, 432, 198, 26781, 1669, 330, 74, 23, 82, 1110, 2231, 4322, 347, 83897, 50976, 32429, 32275, 91106, 698, 743, 1848, 1669, 1901, 11081, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestRequirementsVersionStream(t *testing.T) { t.Parallel() o := TestUpgradeBootOptions{} o.setup() vs, err := o.requirementsVersionStream() require.NoError(t, err, "could not get requirements version stream") assert.Equal(t, "2367726d02b8c", vs.Ref, "RequirementsVersionStream Ref") assert.Equal(t, "https://github.com/jenkins-x/jenkins-x-versions.git", vs.URL, "RequirementsVersionStream URL") }
explode_data.jsonl/29839
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 139 }
[ 2830, 3393, 59202, 5637, 3027, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 22229, 1669, 3393, 43861, 17919, 3798, 16094, 22229, 25338, 2822, 5195, 82, 11, 1848, 1669, 297, 23712, 1368, 5637, 3027, 741, 17957, 35699, 1155, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLaunchInstance(t *testing.T) { machine, err := stubMachine() if err != nil { t.Fatalf("Unable to build test machine manifest: %v", err) } credSecretName := fmt.Sprintf("%s-%s", credentialsSecretName, rand.String(nameLength)) providerConfig := stubProviderConfig(credSecretName) cases := []struct { name string createInstanceErr error instancesErr error expectError bool }{ { name: "Create instance error", createInstanceErr: fmt.Errorf("create instnace failed "), instancesErr: nil, expectError: true, }, { name: "Get instance error", createInstanceErr: nil, instancesErr: fmt.Errorf("get instance failed "), expectError: true, }, { name: "Success test", createInstanceErr: nil, instancesErr: nil, expectError: false, }, } for _, tc := range cases { t.Run(tc.name, func(t *testing.T) { mockCtrl := gomock.NewController(t) mockPowerVSClient := mock.NewMockClient(mockCtrl) //Setup the mocks mockPowerVSClient.EXPECT().CreateInstance(gomock.Any()).Return(stubGetInstances(), tc.createInstanceErr).AnyTimes() mockPowerVSClient.EXPECT().GetInstance(gomock.Any()).Return(stubGetInstance(), tc.instancesErr).AnyTimes() mockPowerVSClient.EXPECT().GetImages().Return(stubGetImages(imageNamePrefix, 3), nil).AnyTimes() mockPowerVSClient.EXPECT().GetNetworks().Return(stubGetNetworks(networkNamePrefix, 3), nil).AnyTimes() _, launchErr := launchInstance(machine, providerConfig, nil, mockPowerVSClient) t.Log(launchErr) if tc.expectError { if launchErr == nil { t.Errorf("Call to launchInstance did not fail as expected") } } else { if launchErr != nil { t.Errorf("Call to launchInstance did not succeed as expected") } } }) } }
explode_data.jsonl/36618
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 778 }
[ 2830, 3393, 32067, 2523, 1155, 353, 8840, 836, 8, 341, 2109, 3814, 11, 1848, 1669, 13633, 21605, 741, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 445, 17075, 311, 1936, 1273, 5662, 14455, 25, 1018, 85, 497, 1848, 340, 197, 532, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestUpdateServiceMapHeadless(t *testing.T) { fp := newFakeProxier() makeServiceMap(fp, makeTestService("ns2", "headless", func(svc *v1.Service) { svc.Spec.Type = v1.ServiceTypeClusterIP svc.Spec.ClusterIP = v1.ClusterIPNone svc.Spec.Ports = addTestPort(svc.Spec.Ports, "rpc", "UDP", 1234, 0, 0) }), makeTestService("ns2", "headless-without-port", func(svc *v1.Service) { svc.Spec.Type = v1.ServiceTypeClusterIP svc.Spec.ClusterIP = v1.ClusterIPNone }), ) // Headless service should be ignored result := UpdateServiceMap(fp.serviceMap, fp.serviceChanges) if len(fp.serviceMap) != 0 { t.Errorf("expected service map length 0, got %d", len(fp.serviceMap)) } // No proxied services, so no healthchecks if len(result.HCServiceNodePorts) != 0 { t.Errorf("expected healthcheck ports length 0, got %d", len(result.HCServiceNodePorts)) } if len(result.UDPStaleClusterIP) != 0 { t.Errorf("expected stale UDP services length 0, got %d", len(result.UDPStaleClusterIP)) } }
explode_data.jsonl/22230
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 404 }
[ 2830, 3393, 4289, 1860, 2227, 12346, 1717, 1155, 353, 8840, 836, 8, 341, 65219, 1669, 501, 52317, 1336, 87, 1268, 2822, 77438, 1860, 2227, 17815, 345, 197, 77438, 2271, 1860, 445, 4412, 17, 497, 330, 1983, 1717, 497, 2915, 1141, 7362, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_BlindSignature(t *testing.T) { p := new(eccutil.Point) p.X, p.Y = new(big.Int), new(big.Int) n := NewBlindSignature(p) n.R.X, n.R.Y = new(big.Int), new(big.Int) n.S.X, n.S.Y = new(big.Int), new(big.Int) b, err := n.Marshal() if err != nil { t.Fatalf("Marshalling failed: %s", err) } _, err = n.Unmarshal(b) if err != nil { t.Fatalf("UnMarshalling failed: %s", err) } n.PubKey.X = big.NewInt(3) _, err = n.Unmarshal(b) if err == nil { t.Fatal("UnMarshalling must fail for foreign signer") } }
explode_data.jsonl/8840
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 255 }
[ 2830, 3393, 56947, 484, 25088, 1155, 353, 8840, 836, 8, 341, 3223, 1669, 501, 2026, 638, 1314, 3775, 340, 3223, 4338, 11, 281, 7507, 284, 501, 75616, 7371, 701, 501, 75616, 7371, 340, 9038, 1669, 1532, 4923, 484, 25088, 1295, 340, 903...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestAddNotExisting(t *testing.T) { kv := secretmanager.NewInMemoryClient("cl-test", "notfoo", "bar") cmdOpts := addCommand{Positional: addCommandPositional{"cl-test", "foo"}, Data: "baz", Labels: map[string]string{"a": "b"}, client: kv} err := cmdOpts.Execute([]string{}) assert.NoError(t, err) secret, _ := kv.Get("foo") secretData, _ := secret.GetValue() assert.Equal(t, "baz", string(secretData)) }
explode_data.jsonl/60320
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 158 }
[ 2830, 3393, 2212, 2623, 53067, 1155, 353, 8840, 836, 8, 341, 16463, 85, 1669, 6234, 13297, 7121, 641, 10642, 2959, 445, 564, 16839, 497, 330, 1921, 7975, 497, 330, 2257, 1138, 25920, 43451, 1669, 912, 4062, 90, 3812, 278, 25, 912, 406...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPartialValuesPython(t *testing.T) { integration.ProgramTest(t, &integration.ProgramTestOptions{ Dir: filepath.Join("partial_values", "python"), Dependencies: []string{ path.Join("..", "..", "sdk", "python", "env", "src"), }, AllowEmptyPreviewChanges: true, }) }
explode_data.jsonl/76376
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 104 }
[ 2830, 3393, 37314, 6227, 30280, 1155, 353, 8840, 836, 8, 341, 2084, 17376, 80254, 2271, 1155, 11, 609, 60168, 80254, 2271, 3798, 515, 197, 197, 6184, 25, 26054, 22363, 445, 37420, 9146, 497, 330, 12669, 4461, 197, 197, 48303, 25, 3056, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestWith_outputDir(t *testing.T) { tmpDir, err := ioutil.TempDir("", "") require.NoError(t, err) defer os.RemoveAll(tmpDir) // Use env to enable this like a user would. os.Setenv("DD_PROFILING_OUTPUT_DIR", tmpDir) defer os.Unsetenv("DD_PROFILING_OUTPUT_DIR") p, err := unstartedProfiler() require.NoError(t, err) bat := batch{ end: time.Now(), profiles: []*profile{ {name: "foo.pprof", data: []byte("foo")}, {name: "bar.pprof", data: []byte("bar")}, }, } require.NoError(t, p.outputDir(bat)) files, err := filepath.Glob(filepath.Join(tmpDir, "*", "*.pprof")) require.NoError(t, err) fileData := map[string]string{} for _, file := range files { data, err := ioutil.ReadFile(file) require.NoError(t, err) fileData[filepath.Base(file)] = string(data) } want := map[string]string{"foo.pprof": "foo", "bar.pprof": "bar"} require.Equal(t, want, fileData) }
explode_data.jsonl/18539
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 383 }
[ 2830, 3393, 2354, 7645, 6184, 1155, 353, 8840, 836, 8, 341, 20082, 6184, 11, 1848, 1669, 43144, 65009, 6184, 19814, 14676, 17957, 35699, 1155, 11, 1848, 340, 16867, 2643, 84427, 10368, 6184, 692, 197, 322, 5443, 6105, 311, 7283, 419, 10...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestReferenceRenderer(t *testing.T) { files, err := ioutil.ReadDir(testdataPath) if err != nil { t.Fatalf("could not read test data: %v", err) } for _, f := range files { if filepath.Ext(f.Name()) != ".json" { continue } t.Run(f.Name(), func(t *testing.T) { path := filepath.Join(testdataPath, f.Name()) contents, err := ioutil.ReadFile(path) if err != nil { t.Fatalf("could not read %v: %v", path, err) } var spec PackageSpec if err = json.Unmarshal(contents, &spec); err != nil { t.Fatalf("could not unmarshal package spec: %v", err) } pkg, err := ImportSpec(spec, nil) if err != nil { t.Fatalf("could not import package: %v", err) } for _, doc := range getDocsForPackage(pkg) { t.Run(doc.entity, func(t *testing.T) { text := []byte(fmt.Sprintf("[entity](%s)", doc.entity)) expected := strings.Replace(doc.entity, "/", "_", -1) + "\n" parsed := ParseDocs(text) actual := []byte(RenderDocsToString(text, parsed, WithReferenceRenderer( func(r *Renderer, w io.Writer, src []byte, l *ast.Link, enter bool) (ast.WalkStatus, error) { if !enter { return ast.WalkContinue, nil } replaced := bytes.Replace(l.Destination, []byte{'/'}, []byte{'_'}, -1) if _, err := r.MarkdownRenderer().Write(w, replaced); err != nil { return ast.WalkStop, err } return ast.WalkSkipChildren, nil }))) assert.Equal(t, expected, string(actual)) }) } }) } }
explode_data.jsonl/64855
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 676 }
[ 2830, 3393, 8856, 11541, 1155, 353, 8840, 836, 8, 341, 74075, 11, 1848, 1669, 43144, 6503, 6184, 8623, 691, 1820, 340, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 445, 28077, 537, 1349, 1273, 821, 25, 1018, 85, 497, 1848, 340, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestOn(t *testing.T) { c := New(Config{ BotToken: "sdkjfhdksfhskdjfhdkfjsd", DisableCache: true, }) t.Run("normal Session", func(t *testing.T) { defer func() { if r := recover(); r != nil { t.Errorf("should not have triggered a panic") } }() c.Gateway().ChannelCreate(func(s Session, e *ChannelCreate) {}) }) t.Run("normal Session with ctrl", func(t *testing.T) { defer func() { if r := recover(); r != nil { t.Errorf("should not have triggered a panic. %s", r) } }() c. Gateway(). WithCtrl(&Ctrl{Runs: 1}). ChannelCreate(func(s Session, e *ChannelCreate) {}) }) t.Run("normal Session with multiple ctrl's", func(t *testing.T) { defer func() { if r := recover(); r == nil { t.Errorf("multiple controllers should trigger a panic. %s", r) } }() c.Gateway(). WithCtrl(&Ctrl{Runs: 1}). WithCtrl(&Ctrl{Until: time.Now().Add(1 * time.Minute)}). ChannelCreate(func(s Session, e *ChannelCreate) {}) }) }
explode_data.jsonl/41652
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 420 }
[ 2830, 3393, 1925, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 1532, 33687, 515, 197, 12791, 354, 3323, 25, 257, 330, 51295, 96423, 15990, 2787, 31104, 4886, 77504, 31104, 7584, 69, 2519, 67, 756, 197, 197, 25479, 8233, 25, 830, 345, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestCleanupNS(t *testing.T) { ctrl := gomock.NewController(t) defer ctrl.Finish() ecscniClient := NewClient(&Config{}) libcniClient := mock_libcni.NewMockCNI(ctrl) ecscniClient.(*cniClient).libcni = libcniClient // This will be called for both bridge and eni plugin libcniClient.EXPECT().DelNetwork(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil).Times(2) additionalRoutesJson := `["169.254.172.1/32", "10.11.12.13/32"]` var additionalRoutes []cnitypes.IPNet err := json.Unmarshal([]byte(additionalRoutesJson), &additionalRoutes) assert.NoError(t, err) err = ecscniClient.CleanupNS(context.TODO(), &Config{AdditionalLocalRoutes: additionalRoutes}, time.Second) assert.NoError(t, err) }
explode_data.jsonl/30397
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 280 }
[ 2830, 3393, 67335, 2448, 1155, 353, 8840, 836, 8, 341, 84381, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 16867, 23743, 991, 18176, 2822, 197, 757, 2388, 7751, 2959, 1669, 1532, 2959, 2099, 2648, 37790, 197, 55576, 7751, 2959, 1669, 78...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGenerateModel_DocString(t *testing.T) { funcMap := FuncMapFunc(DefaultLanguageFunc()) templ := template.Must(template.New("docstring").Funcs(funcMap).Parse(string(assets["docstring.gotmpl"]))) tt := templateTest{t, templ} var gmp GenSchema gmp.Title = "The title of the property" gmp.Description = "The description of the property" var expected = `The title of the property // // The description of the property` tt.assertRender(gmp, expected) gmp.Title = "" expected = `The description of the property` tt.assertRender(gmp, expected) gmp.Description = "" gmp.Name = "theModel" expected = `the model` tt.assertRender(gmp, expected) }
explode_data.jsonl/2492
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 222 }
[ 2830, 3393, 31115, 1712, 1557, 509, 703, 1155, 353, 8840, 836, 8, 341, 29244, 2227, 1669, 18016, 2227, 9626, 87874, 13806, 9626, 2398, 197, 74860, 1669, 3811, 50463, 29963, 7121, 445, 5236, 917, 1827, 9626, 82, 18552, 2227, 568, 14463, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBatchModeParseFunc(t *testing.T) { defer func() { if err := recover(); err != nil { t.Errorf("panic in test is not acceptable") } }() f, ok := mapParse[BatchMode] if !ok { t.Errorf("Cannot found parse-function") } if f == nil { t.Errorf("Parse-function is nil") } // TODO }
explode_data.jsonl/13455
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 130 }
[ 2830, 3393, 21074, 3636, 14463, 9626, 1155, 353, 8840, 836, 8, 341, 16867, 2915, 368, 341, 197, 743, 1848, 1669, 11731, 2129, 1848, 961, 2092, 341, 298, 3244, 13080, 445, 19079, 304, 1273, 374, 537, 21555, 1138, 197, 197, 532, 197, 69...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestValidateServiceAgreement(t *testing.T) { t.Parallel() store, cleanup := cltest.NewStore(t) defer cleanup() err := store.KeyStore.Unlock(cltest.Password) _, fromAddress := cltest.MustAddRandomKeyToKeystore(t, store, 0) assert.NoError(t, err) _, err = store.KeyStore.NewAccount() assert.NoError(t, err) oracles := []string{fromAddress.Hex()} basic := string(cltest.MustReadFile(t, "testdata/hello_world_agreement.json")) basic = cltest.MustJSONSet(t, basic, "oracles", oracles) threeDays, _ := time.ParseDuration("72h") basic = cltest.MustJSONSet(t, basic, "endAt", time.Now().Add(threeDays)) tests := []struct { name string input string wantError bool }{ {"basic", basic, false}, {"no payment", cltest.MustJSONDel(t, basic, "payment"), true}, {"less than minimum payment", cltest.MustJSONSet(t, basic, "payment", "1"), true}, {"less than minimum expiration", cltest.MustJSONSet(t, basic, "expiration", 1), true}, {"without being listed as an oracle", cltest.MustJSONSet(t, basic, "oracles", []string{}), true}, {"past allowed end at", cltest.MustJSONSet(t, basic, "endAt", "3000-06-19T22:17:19Z"), true}, {"before allowed end at", cltest.MustJSONSet(t, basic, "endAt", "2018-06-19T22:17:19Z"), true}, {"more than one initiator should fail", cltest.MustJSONSet(t, basic, "initiators", []models.Initiator{{ JobSpecID: models.NewJobID(), Type: models.InitiatorServiceAgreementExecutionLog, InitiatorParams: models.InitiatorParams{}, }, { JobSpecID: models.NewJobID(), Type: models.InitiatorWeb, InitiatorParams: models.InitiatorParams{}, }, }), true}, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { sa, err := cltest.ServiceAgreementFromString(test.input) require.NoError(t, err) result := services.ValidateServiceAgreement(sa, store) cltest.AssertError(t, test.wantError, result) }) } }
explode_data.jsonl/75333
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 793 }
[ 2830, 3393, 17926, 1860, 9042, 17150, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 57279, 11, 21290, 1669, 1185, 1944, 7121, 6093, 1155, 340, 16867, 21290, 741, 9859, 1669, 3553, 9610, 6093, 39188, 9849, 1944, 25690, 340, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMultiplicationProofFake(t *testing.T) { g, gok := buildGroup(big.NewInt(47)) require.True(t, gok, "Failed to setup group for Multiplication proof testing") s := newMultiplicationProofStructure("m1", "m2", "mod", "result", 3) proof := s.fakeProof(g) assert.True(t, s.verifyProofStructure(proof), "Fake proof structure rejected.") }
explode_data.jsonl/56445
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 120 }
[ 2830, 3393, 57251, 1693, 31076, 52317, 1155, 353, 8840, 836, 8, 341, 3174, 11, 342, 562, 1669, 1936, 2808, 75616, 7121, 1072, 7, 19, 22, 1171, 17957, 32443, 1155, 11, 342, 562, 11, 330, 9408, 311, 6505, 1874, 369, 58712, 1693, 11064, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFileStore_getPrivateKey(t *testing.T) { type fields struct { fs afero.Fs rand io.Reader logger io.Writer } type args struct { encryptedKey *keystore.EncryptedKey deriveKeyOptions multi.OptionsBuilders } tests := []struct { name string fields fields args args want crypto.PrivateKey wantErr bool }{ { "success-sofia-secp256k1", fields{ nil, nil, ioutil.Discard, }, args{ &encryptedKeySofiaSECP256k1, multi.OptionsBuilders{ Scrypt: []scrypt.DeriveOptionsBuilder{scrypt.WithPassphrase("test")}, }, }, secp256k1test.SofiaPrivateKey, false, }, { "success-sofia-ed25519", fields{ nil, nil, ioutil.Discard, }, args{ &encryptedKeySofiaED25519, multi.OptionsBuilders{ Scrypt: []scrypt.DeriveOptionsBuilder{scrypt.WithPassphrase("sofia-ed25519")}, }, }, ed25519test.SofiaPrivateKey, false, }, //{ // "success-charlotte-sr25519", // fields{ // nil, // nil, // ioutil.Discard, // }, // args{ // &encryptedKeyCharlotteSR25519, // multi.OptionsBuilders{ // Scrypt: []scrypt.DeriveOptionsBuilder{scrypt.WithPassphrase("sofia-ed25519")}, // }, // }, // sr25519test.SofiaPrivateKey, // false, //}, { "err-private-key-bytes", fields{ nil, nil, ioutil.Discard, }, args{ func() *keystore.EncryptedKey { m := encryptedKeySofiaED25519 m.CurveType = "invalid" return &m }(), multi.OptionsBuilders{ Scrypt: []scrypt.DeriveOptionsBuilder{scrypt.WithPassphrase("sofia-ed25519")}, }, }, nil, true, }, { "err-wrong-key", fields{ nil, nil, ioutil.Discard, }, args{ &encryptedKeyCharlotteED25519, multi.OptionsBuilders{ Scrypt: []scrypt.DeriveOptionsBuilder{scrypt.WithPassphrase("sofia-ed25519")}, }, }, nil, true, }, { "err-derive-key", fields{ nil, nil, ioutil.Discard, }, args{ func() *keystore.EncryptedKey { m := encryptedKeySofiaED25519 m.KDF = "invalid" return &m }(), multi.OptionsBuilders{ Scrypt: []scrypt.DeriveOptionsBuilder{scrypt.WithPassphrase("sofia-ed25519")}, }, }, nil, true, }, { "err-nil-encrypted-key", fields{ nil, nil, ioutil.Discard, }, args{ nil, multi.OptionsBuilders{ Scrypt: 
[]scrypt.DeriveOptionsBuilder{scrypt.WithPassphrase("sofia-ed25519")}, }, }, nil, true, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { f := FileStore{ fs: tt.fields.fs, rand: tt.fields.rand, logger: tt.fields.logger, } got, err := f.getPrivateKey(tt.args.encryptedKey, tt.args.deriveKeyOptions) if (err != nil) != tt.wantErr { t.Errorf("FileStore.getPrivateKey() error = %v, wantErr %v", err, tt.wantErr) return } if !assert.Equal(t, tt.want, got) { t.Errorf("FileStore.getPrivateKey() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/59573
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1564 }
[ 2830, 3393, 1703, 6093, 3062, 75981, 1155, 353, 8840, 836, 8, 341, 13158, 5043, 2036, 341, 197, 53584, 257, 264, 802, 78, 991, 82, 198, 197, 7000, 437, 256, 6399, 47431, 198, 197, 17060, 6399, 47838, 198, 197, 532, 13158, 2827, 2036, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNopStoreVisitAll(t *testing.T) { ns := NopStore{} for i := 0; i < 5; i++ { id := strconv.FormatInt(int64(i), 10) err := ns.Set(id, NopSession{}) assert.NoError(t, err) } var visitedIds []string ns.VisitAll(func(id string, _ interface{}) bool { visitedIds = append(visitedIds, id) return true }) assert.Len(t, visitedIds, 0) }
explode_data.jsonl/82375
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 154 }
[ 2830, 3393, 45, 453, 6093, 26218, 2403, 1155, 353, 8840, 836, 8, 341, 84041, 1669, 451, 453, 6093, 31483, 2023, 600, 1669, 220, 15, 26, 600, 366, 220, 20, 26, 600, 1027, 341, 197, 15710, 1669, 33317, 9978, 1072, 1548, 21, 19, 1956, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCheckExpire2(t *testing.T) { q, mem := initEnv(0) defer q.Close() defer mem.Close() // add tx err := add4Tx(mem.client) if err != nil { t.Error("add tx error", err.Error()) return } mem.setHeader(&types.Header{Height: 50, BlockTime: 1e9 + 1}) msg := mem.client.NewMessage("mempool", types.EventTxList, &types.TxHashList{Count: 100}) mem.client.Send(msg, true) data, err := mem.client.Wait(msg) if err != nil { t.Error(err) return } txs := data.GetData().(*types.ReplyTxList).GetTxs() if len(txs) != 3 { t.Error("TestCheckExpire failed", len(txs)) } }
explode_data.jsonl/16831
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 247 }
[ 2830, 3393, 3973, 8033, 554, 17, 1155, 353, 8840, 836, 8, 341, 18534, 11, 1833, 1669, 2930, 14359, 7, 15, 340, 16867, 2804, 10421, 741, 16867, 1833, 10421, 2822, 197, 322, 912, 9854, 198, 9859, 1669, 912, 19, 31584, 39908, 6581, 340, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestSortBackups(t *testing.T) { tests := []struct { name string backupList *v1.BackupList expected []v1.Backup }{ { name: "non-timestamped backups", backupList: &v1.BackupList{Items: []v1.Backup{ {ObjectMeta: metav1.ObjectMeta{Name: "a"}}, {ObjectMeta: metav1.ObjectMeta{Name: "c"}}, {ObjectMeta: metav1.ObjectMeta{Name: "b"}}, }}, expected: []v1.Backup{ {ObjectMeta: metav1.ObjectMeta{Name: "a"}}, {ObjectMeta: metav1.ObjectMeta{Name: "b"}}, {ObjectMeta: metav1.ObjectMeta{Name: "c"}}, }, }, { name: "timestamped backups", backupList: &v1.BackupList{Items: []v1.Backup{ {ObjectMeta: metav1.ObjectMeta{Name: "schedule-20170102030405"}}, {ObjectMeta: metav1.ObjectMeta{Name: "schedule-20170102030406"}}, {ObjectMeta: metav1.ObjectMeta{Name: "schedule-20170102030407"}}, }}, expected: []v1.Backup{ {ObjectMeta: metav1.ObjectMeta{Name: "schedule-20170102030407"}}, {ObjectMeta: metav1.ObjectMeta{Name: "schedule-20170102030406"}}, {ObjectMeta: metav1.ObjectMeta{Name: "schedule-20170102030405"}}, }, }, { name: "non-timestamped and timestamped backups", backupList: &v1.BackupList{Items: []v1.Backup{ {ObjectMeta: metav1.ObjectMeta{Name: "schedule-20170102030405"}}, {ObjectMeta: metav1.ObjectMeta{Name: "schedule-20170102030406"}}, {ObjectMeta: metav1.ObjectMeta{Name: "a"}}, {ObjectMeta: metav1.ObjectMeta{Name: "schedule-20170102030407"}}, }}, expected: []v1.Backup{ {ObjectMeta: metav1.ObjectMeta{Name: "a"}}, {ObjectMeta: metav1.ObjectMeta{Name: "schedule-20170102030407"}}, {ObjectMeta: metav1.ObjectMeta{Name: "schedule-20170102030406"}}, {ObjectMeta: metav1.ObjectMeta{Name: "schedule-20170102030405"}}, }, }, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { sortBackupsByPrefixAndTimestamp(test.backupList) if assert.Equal(t, len(test.backupList.Items), len(test.expected)) { for i := range test.expected { assert.Equal(t, test.expected[i].Name, test.backupList.Items[i].Name) } } }) } }
explode_data.jsonl/71485
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 946 }
[ 2830, 3393, 10231, 3707, 8602, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 981, 914, 198, 197, 197, 31371, 852, 353, 85, 16, 8864, 454, 852, 198, 197, 42400, 256, 3056, 85, 16, 8864, 454, 198, 197, 59403,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestDownloadExtraction(t *testing.T) { th.SetupHTTP() defer th.TeardownHTTP() HandleDownloadObjectSuccessfully(t) response := Download(fake.ServiceClient(), "testContainer", "testObject", nil) // Check []byte extraction bytes, err := response.ExtractContent() th.AssertNoErr(t, err) th.CheckEquals(t, "Successful download with Gophercloud", string(bytes)) }
explode_data.jsonl/30747
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 120 }
[ 2830, 3393, 11377, 840, 26425, 1155, 353, 8840, 836, 8, 341, 70479, 39820, 9230, 741, 16867, 270, 94849, 37496, 9230, 741, 197, 6999, 11377, 1190, 35959, 1155, 692, 21735, 1669, 8577, 74138, 13860, 2959, 1507, 330, 1944, 4502, 497, 330, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSnakeCase(t *testing.T) { var tests = map[string]string{ "": "", "ID": "id", "ColumnName": "column_name", "COLUMN_NAME": "column_name", "column_name": "column_name", "UserID": "user_id", "UserNameRaw": "user_name_raw", } for i, e := range tests { if v := SnakeCase(i); v != e { t.Errorf("SnakeCase(\"%s\"): expected %s, got %s", i, e, v) } } }
explode_data.jsonl/65314
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 200 }
[ 2830, 3393, 83420, 4207, 1155, 353, 8840, 836, 8, 341, 2405, 7032, 284, 2415, 14032, 30953, 515, 197, 197, 28796, 310, 8324, 197, 197, 1, 915, 788, 688, 330, 307, 756, 197, 197, 1, 26162, 788, 220, 330, 6229, 1269, 756, 197, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestDNSProvider_Cleanup(t *testing.T) { provider, mux := setupTest(t) mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) { assert.Equal(t, http.MethodGet, r.Method, "method") q := r.URL.Query() assert.Equal(t, q.Get("key"), fakeAPIKey, "key mismatch") assert.Equal(t, q.Get("cmd"), "dns-remove_record", "cmd mismatch") assert.Equal(t, q.Get("format"), "json") assert.Equal(t, q.Get("record"), "_acme-challenge.example.com") assert.Equal(t, q.Get("value"), fakeKeyAuth, "value mismatch") assert.Equal(t, q.Get("comment"), "Managed+By+lego") _, err := fmt.Fprintf(w, `{"data":"record_removed","result":"success"}`) if err != nil { http.Error(w, err.Error(), http.StatusInternalServerError) return } }) err := provider.CleanUp("example.com", "", fakeChallengeToken) require.NoError(t, err, "failed to remove TXT record") }
explode_data.jsonl/52236
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 346 }
[ 2830, 3393, 61088, 5179, 920, 60639, 1155, 353, 8840, 836, 8, 341, 197, 19979, 11, 59807, 1669, 6505, 2271, 1155, 692, 2109, 2200, 63623, 35460, 2915, 3622, 1758, 37508, 11, 435, 353, 1254, 9659, 8, 341, 197, 6948, 12808, 1155, 11, 17...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestDispatchGenAttrs_Failure(t *testing.T) { h := &mockHandler{err: errors.New("handler is not interested in generating attributes")} bag := attribute.GetFakeMutableBagForTesting(defaultApaAttributes) mapper := template.NewOutputMapperFn(map[string]compiled.Expression{}) _, err := executeDispatchGenAttrs(t, h, bag, mapper) if err == nil { t.Fatal("expected error not found") } }
explode_data.jsonl/64998
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 131 }
[ 2830, 3393, 11283, 9967, 53671, 1400, 9373, 1155, 353, 8840, 836, 8, 341, 9598, 1669, 609, 16712, 3050, 90, 615, 25, 5975, 7121, 445, 17905, 374, 537, 8014, 304, 23163, 8201, 899, 630, 2233, 351, 1669, 7035, 2234, 52317, 11217, 12933, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2