repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15
values |
|---|---|---|---|---|
DuncanDoyle/jbpm | jbpm-human-task/jbpm-human-task-audit/src/test/java/org/jbpm/services/task/audit/service/TaskAuditBaseTest.java | 46996 | /*
* Copyright 2017 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jbpm.services.task.audit.service;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.inject.Inject;
import javax.persistence.EntityManager;
import org.assertj.core.api.Assertions;
import org.jbpm.services.task.HumanTaskServicesBaseTest;
import org.jbpm.services.task.audit.commands.DeleteAuditEventsCommand;
import org.jbpm.services.task.audit.commands.DeleteBAMTaskSummariesCommand;
import org.jbpm.services.task.audit.commands.GetAuditEventsCommand;
import org.jbpm.services.task.audit.commands.GetBAMTaskSummariesCommand;
import org.jbpm.services.task.audit.impl.model.AuditTaskImpl;
import org.jbpm.services.task.audit.impl.model.BAMTaskSummaryImpl;
import org.jbpm.services.task.audit.service.objects.Person;
import org.jbpm.services.task.impl.model.I18NTextImpl;
import org.jbpm.services.task.utils.TaskFluent;
import org.junit.Test;
import org.kie.api.task.model.I18NText;
import org.kie.api.task.model.Status;
import org.kie.api.task.model.Task;
import org.kie.api.task.model.TaskSummary;
import org.kie.internal.query.QueryFilter;
import org.kie.internal.task.api.AuditTask;
import org.kie.internal.task.api.TaskVariable;
import org.kie.internal.task.api.TaskVariable.VariableType;
import org.kie.internal.task.api.model.TaskEvent;
public abstract class TaskAuditBaseTest extends HumanTaskServicesBaseTest {
// Audit-side query service under test; injected by the concrete test's container.
@Inject
protected TaskAuditService taskAuditService;
@Test
public void testComplete() {
    // Drives a task through Ready -> Reserved -> InProgress -> Completed and then
    // verifies the recorded audit events and BAM task summaries, including the
    // commands that delete them (per task id and globally).
    long initTimeMs = new Date().getTime();
    Task task = new TaskFluent().setName("This is my task name")
            .addPotentialGroup("Knights Templer")
            .setAdminUser("Administrator")
            .getTask();
    taskService.addTask(task, new HashMap<String, Object>());
    long taskId = task.getId();

    // Group assignment only -> the task is Ready for any group member.
    List<TaskSummary> allGroupAuditTasks = taskService.getTasksAssignedAsPotentialOwner("salaboy", null, null, null);
    assertEquals(1, allGroupAuditTasks.size());
    assertTrue(allGroupAuditTasks.get(0).getStatusId().equals("Ready"));

    // Claim hides the task from other group members and reserves it for the claimer.
    taskService.claim(taskId, "Darth Vader");
    allGroupAuditTasks = taskService.getTasksAssignedAsPotentialOwner("salaboy", null, null, null);
    assertEquals(0, allGroupAuditTasks.size());
    allGroupAuditTasks = taskService.getTasksAssignedAsPotentialOwner("Darth Vader", null, null, null);
    assertEquals(1, allGroupAuditTasks.size());
    assertTrue(allGroupAuditTasks.get(0).getStatusId().equals("Reserved"));

    // Release puts the task back into the group queue (Ready again).
    taskService.release(taskId, "Darth Vader");
    allGroupAuditTasks = taskService.getTasksAssignedAsPotentialOwner("salaboy", null, null, null);
    assertEquals(1, allGroupAuditTasks.size());
    assertTrue(allGroupAuditTasks.get(0).getStatusId().equals("Ready"));

    // Claim again, then start working on it.
    taskService.claim(taskId, "Darth Vader");
    allGroupAuditTasks = taskService.getTasksAssignedAsPotentialOwner("salaboy", null, null, null);
    assertEquals(0, allGroupAuditTasks.size());
    allGroupAuditTasks = taskService.getTasksAssignedAsPotentialOwner("Darth Vader", null, null, null);
    assertEquals(1, allGroupAuditTasks.size());
    assertTrue(allGroupAuditTasks.get(0).getStatusId().equals("Reserved"));
    taskService.start(taskId, "Darth Vader");
    Task task1 = taskService.getTaskById(taskId);
    assertEquals(Status.InProgress, task1.getTaskData().getStatus());
    assertEquals("Darth Vader", task1.getTaskData().getActualOwner().getId());

    // Complete the task and verify its final state.
    taskService.complete(taskId, "Darth Vader", null);
    Task task2 = taskService.getTaskById(taskId);
    assertEquals(Status.Completed, task2.getTaskData().getStatus());
    assertEquals("Darth Vader", task2.getTaskData().getActualOwner().getId());

    // Six lifecycle events were recorded: added, claimed, released, claimed, started, completed.
    List<TaskEvent> allTaskEvents = taskService.execute(new GetAuditEventsCommand(taskId, new QueryFilter(0, 0)));
    assertEquals(6, allTaskEvents.size());

    // Run a second task through the short path to exercise DeleteAuditEventsCommand.
    int numFirstTaskEvents = allTaskEvents.size();
    task = new TaskFluent().setName("This is my task name 2")
            .addPotentialGroup("Knights Templer")
            .setAdminUser("Administrator")
            .getTask();
    taskService.addTask(task, new HashMap<String, Object>());
    long secondTaskId = task.getId();
    taskService.claim(secondTaskId, "Darth Vader");
    taskService.start(secondTaskId, "Darth Vader");
    taskService.complete(secondTaskId, "Darth Vader", null);
    allTaskEvents = taskService.execute(new GetAuditEventsCommand());
    int numTaskEvents = allTaskEvents.size();
    assertTrue("Expected more than " + numFirstTaskEvents + " events: " + numTaskEvents,
            numTaskEvents > numFirstTaskEvents);

    // Deleting by task id removes only that task's events...
    taskService.execute(new DeleteAuditEventsCommand(taskId));
    allTaskEvents = taskService.execute(new GetAuditEventsCommand());
    assertEquals(numTaskEvents - numFirstTaskEvents, allTaskEvents.size());
    // ...and the no-arg command removes everything.
    taskService.execute(new DeleteAuditEventsCommand());
    allTaskEvents = taskService.execute(new GetAuditEventsCommand());
    assertEquals(0, allTaskEvents.size());

    // test get/delete BAM Task summaries commands
    List<BAMTaskSummaryImpl> bamTaskList = taskService.execute(new GetBAMTaskSummariesCommand());
    assertEquals("BAM Task Summary list size: ", 2, bamTaskList.size());
    taskService.execute(new DeleteBAMTaskSummariesCommand(taskId));
    bamTaskList = taskService.execute(new GetBAMTaskSummariesCommand());
    assertEquals("BAM Task Summary list size after delete (task id: " + taskId + ") : ", 1, bamTaskList.size());
    // BUGFIX: this query targets the second task, so the failure message must
    // report secondTaskId (it previously reported the already-deleted taskId).
    bamTaskList = taskService.execute(new GetBAMTaskSummariesCommand(secondTaskId));
    assertEquals("BAM Task Summary list size (task id: " + secondTaskId + ") : ", 1, bamTaskList.size());
    taskService.execute(new DeleteBAMTaskSummariesCommand());
    bamTaskList = taskService.execute(new GetBAMTaskSummariesCommand());
    assertEquals("BAM Task Summary list size after deleting all summaries: ", 0, bamTaskList.size());

    // Audit tasks survive the event/summary deletions above.
    List<AuditTask> allHistoryAuditTasks = taskAuditService.getAllAuditTasks(new QueryFilter(0, 0));
    assertEquals(2, allHistoryAuditTasks.size());

    // The last-modification timestamp must fall within [test start, now].
    long currentTimeMs = new Date().getTime();
    for (AuditTask at : allHistoryAuditTasks) {
        Date modDate = ((AuditTaskImpl) at).getLastModificationDate();
        assertNotNull(modDate);
        long modDateMs = modDate.getTime();
        // BUGFIX: failure messages previously asserted the success condition
        // ("is not too much in the past"), which is misleading when shown on failure.
        assertTrue("Task " + at.getTaskId() + " modification date must not precede test start", modDateMs >= initTimeMs);
        assertTrue("Task " + at.getTaskId() + " modification date must not be in the future", modDateMs <= currentTimeMs);
    }
}
@Test
public void testOnlyActiveTasks() {
    // A single potential user means the task is auto-assigned and starts Reserved;
    // the audit status query must mirror the live task service.
    Task task = new TaskFluent().setName("This is my task name")
            .addPotentialUser("salaboy")
            .setAdminUser("Administrator")
            .getTask();
    taskService.addTask(task, new HashMap<String, Object>());

    List<TaskSummary> activeTasks = taskService.getTasksAssignedAsPotentialOwner("salaboy", null, null, null);
    assertEquals(1, activeTasks.size());
    assertTrue("Reserved".equals(activeTasks.get(0).getStatusId()));

    // Query the audit log for Reserved tasks of this user: exactly one hit.
    QueryFilter filter = new QueryFilter(0, 0);
    Map<String, Object> filterParams = new HashMap<String, Object>();
    List<String> statusFilter = new ArrayList<String>();
    statusFilter.add(Status.Reserved.toString());
    filterParams.put("statuses", statusFilter);
    filter.setParams(filterParams);
    List<AuditTask> auditTasks = taskAuditService.getAllAuditTasksByStatus("salaboy", filter);
    assertEquals(1, auditTasks.size());
    assertTrue("Reserved".equals(auditTasks.get(0).getStatus()));

    // The same query restricted to Completed tasks must come back empty.
    statusFilter = new ArrayList<String>();
    statusFilter.add(Status.Completed.toString());
    filterParams.put("statuses", statusFilter);
    filter.setParams(filterParams);
    auditTasks = taskAuditService.getAllAuditTasksByStatus("salaboy", filter);
    assertEquals(0, auditTasks.size());
}
@Test
public void testGroupTasks() {
    // Two potential users plus a group keep the task in Ready (nobody auto-claims).
    Task groupTask = new TaskFluent().setName("This is my task name")
            .addPotentialUser("salaboy")
            .addPotentialUser("krisv")
            .addPotentialGroup("Knights Templer")
            .setAdminUser("Administrator")
            .getTask();
    taskService.addTask(groupTask, new HashMap<String, Object>());

    List<TaskSummary> potentialOwnerTasks = taskService.getTasksAssignedAsPotentialOwner("salaboy", null, null, null);
    assertEquals(1, potentialOwnerTasks.size());
    assertTrue("Ready".equals(potentialOwnerTasks.get(0).getStatusId()));

    // The group-audit view must mirror the live task service.
    List<AuditTask> groupAuditTasks = taskAuditService.getAllGroupAuditTasksByUser("salaboy", new QueryFilter(0, 0));
    assertEquals(1, groupAuditTasks.size());
    assertTrue("Ready".equals(groupAuditTasks.get(0).getStatus()));
}
@Test
public void testAdminTasks() {
    // "salaboy" is the business administrator of this task; both the live
    // service and the audit service must report the assignment.
    Task adminTask = new TaskFluent().setName("This is my task name")
            .setAdminUser("salaboy")
            .getTask();
    taskService.addTask(adminTask, new HashMap<String, Object>());

    List<TaskSummary> adminTasks = taskService.getTasksAssignedAsBusinessAdministrator("salaboy", null);
    assertEquals(1, adminTasks.size());

    List<AuditTask> adminAuditTasks = taskAuditService.getAllAdminAuditTasksByUser("salaboy", new QueryFilter(0, 0));
    assertEquals(1, adminAuditTasks.size());
}
@Test
public void testExitAfterClaim() {
    // Group-assigned task: starts Ready, is claimed, then exited by the admin.
    Task task = new TaskFluent().setName("This is my task name 2")
            .addPotentialGroup("Knights Templer")
            .setAdminUser("Administrator")
            .getTask();
    taskService.addTask(task, new HashMap<String, Object>());
    long taskId = task.getId();

    List<TaskSummary> visibleTasks = taskService.getTasksAssignedAsPotentialOwner("salaboy", null, null, null);
    assertEquals(1, visibleTasks.size());
    assertTrue("Ready".equals(visibleTasks.get(0).getStatusId()));

    // Claiming moves the task out of the group members' queues...
    taskService.claim(taskId, "Darth Vader");
    visibleTasks = taskService.getTasksAssignedAsPotentialOwner("salaboy", null, null, null);
    assertEquals(0, visibleTasks.size());
    // ...and into the claimer's queue as Reserved.
    visibleTasks = taskService.getTasksAssignedAsPotentialOwner("Darth Vader", null, null, null);
    assertEquals(1, visibleTasks.size());
    assertTrue("Reserved".equals(visibleTasks.get(0).getStatusId()));

    // Exiting removes the task from every queue but keeps its audit history.
    taskService.exit(taskId, "Administrator");
    List<AuditTask> historyTasks = taskAuditService.getAllAuditTasks(new QueryFilter(0, 0));
    assertEquals(1, historyTasks.size());
    visibleTasks = taskService.getTasksAssignedAsPotentialOwner("salaboy", null, null, null);
    assertEquals(0, visibleTasks.size());
}
@Test
public void testExitBeforeClaim() {
    // Exit a group task while it is still Ready (never claimed): it must vanish
    // from the group queue but remain visible in the audit history.
    Task task = new TaskFluent().setName("This is my task name 2")
            .addPotentialGroup("Knights Templer")
            .setAdminUser("Administrator")
            .getTask();
    taskService.addTask(task, new HashMap<String, Object>());
    long taskId = task.getId();

    List<TaskSummary> visibleTasks = taskService.getTasksAssignedAsPotentialOwner("salaboy", null, null, null);
    assertEquals(1, visibleTasks.size());
    assertTrue("Ready".equals(visibleTasks.get(0).getStatusId()));

    taskService.exit(taskId, "Administrator");
    List<AuditTask> historyTasks = taskAuditService.getAllAuditTasks(new QueryFilter(0, 0));
    assertEquals(1, historyTasks.size());
    visibleTasks = taskService.getTasksAssignedAsPotentialOwner("salaboy", null, null, null);
    assertEquals(0, visibleTasks.size());
}
private void testDescriptionUpdate(String oldDescription, String newDescription, boolean changeExpected) {
    // Create a task with the old description, update it, then verify both the
    // live task and the audit log carry the new value. When the description
    // actually changed, an extra "updated" task event must have been recorded.
    Task task = new TaskFluent()
            .setDescription(oldDescription)
            .setAdminUser("Administrator")
            .getTask();
    taskService.addTask(task, new HashMap<String, Object>());
    long taskId = task.getId();

    List<I18NText> newDescriptions = new ArrayList<I18NText>();
    newDescriptions.add(new I18NTextImpl("", newDescription));
    taskService.setDescriptions(taskId, newDescriptions);

    task = taskService.getTaskById(taskId);
    Assertions.assertThat(task.getDescription()).isEqualTo(newDescription);

    List<AuditTask> auditTasks = taskAuditService.getAllAuditTasks(new QueryFilter());
    Assertions.assertThat(auditTasks).hasSize(1);
    Assertions.assertThat(auditTasks.get(0).getDescription()).isEqualTo(newDescription);

    List<TaskEvent> events = taskAuditService.getAllTaskEvents(taskId, new QueryFilter());
    if (changeExpected) {
        // Event 0 is the creation; event 1 records the description change and
        // mentions both old and new values in its message.
        Assertions.assertThat(events).hasSize(2);
        Assertions.assertThat(events.get(1).getMessage()).contains(String.valueOf(oldDescription),
                String.valueOf(newDescription));
    } else {
        Assertions.assertThat(events).hasSize(1);
    }
}
// Scenarios for testDescriptionUpdate(old, new, changeExpected): an extra task
// event is expected whenever the description value actually changes,
// including transitions to/from null and the empty string.
@Test
public void testDescriptionUpdateSame() {
// Identical value: no change event expected.
testDescriptionUpdate("description", "description", false);
}
@Test
public void testDescriptionUpdateDifferent() {
testDescriptionUpdate("old description", "new description", true);
}
@Test
public void testDescriptionUpdateToNull() {
testDescriptionUpdate("old description", null, true);
}
@Test
public void testDescriptionUpdateToEmpty() {
testDescriptionUpdate("old description", "", true);
}
@Test
public void testDescriptionUpdateFromNull() {
testDescriptionUpdate(null, "new description", true);
}
@Test
public void testDescriptionUpdateFromEmpty() {
testDescriptionUpdate("", "new description", true);
}
private void testNameUpdate(String oldName, String newName, boolean changeExpected) {
    // Create a task with the old name, rename it, then verify the live task,
    // the audit entry, and (when the value differed) the recorded change event.
    Task task = new TaskFluent()
            .setName(oldName)
            .setAdminUser("Administrator")
            .getTask();
    taskService.addTask(task, new HashMap<String, Object>());
    long taskId = task.getId();

    List<I18NText> newNames = new ArrayList<I18NText>();
    newNames.add(new I18NTextImpl("", newName));
    taskService.setTaskNames(taskId, newNames);

    task = taskService.getTaskById(taskId);
    Assertions.assertThat(task.getName()).isEqualTo(newName);

    List<AuditTask> auditTasks = taskAuditService.getAllAuditTasks(new QueryFilter());
    Assertions.assertThat(auditTasks).hasSize(1);
    Assertions.assertThat(auditTasks.get(0).getName()).isEqualTo(newName);

    List<TaskEvent> events = taskAuditService.getAllTaskEvents(taskId, new QueryFilter());
    if (changeExpected) {
        // Event 0 is the creation; event 1 records the name change and
        // mentions both old and new values in its message.
        Assertions.assertThat(events).hasSize(2);
        Assertions.assertThat(events.get(1).getMessage()).contains(String.valueOf(oldName),
                String.valueOf(newName));
    } else {
        Assertions.assertThat(events).hasSize(1);
    }
}
// Scenarios for testNameUpdate(old, new, changeExpected): an extra task event
// is expected whenever the name value actually changes, including transitions
// to/from null and the empty string.
@Test
public void testNameUpdateSame() {
// Identical value: no change event expected.
testNameUpdate("name", "name", false);
}
@Test
public void testNameUpdateDifferent() {
testNameUpdate("old name", "new name", true);
}
@Test
public void testNameUpdateToNull() {
testNameUpdate("old name", null, true);
}
@Test
public void testNameUpdateToEmpty() {
testNameUpdate("old name", "", true);
}
@Test
public void testNameUpdateFromNull() {
testNameUpdate(null, "new name", true);
}
@Test
public void testNameUpdateFromEmpty() {
testNameUpdate("", "new name", true);
}
private void testPriorityUpdate(int oldPriority, int newPriority, boolean changeExpected) {
    // Create a task with the old priority, change it, then verify the live task,
    // the audit entry, and (when the value differed) the recorded change event.
    Task task = new TaskFluent()
            .setPriority(oldPriority)
            .setAdminUser("Administrator")
            .getTask();
    taskService.addTask(task, new HashMap<String, Object>());
    long taskId = task.getId();

    taskService.setPriority(taskId, newPriority);
    task = taskService.getTaskById(taskId);
    Assertions.assertThat(task.getPriority()).isEqualTo(newPriority);

    List<AuditTask> auditTasks = taskAuditService.getAllAuditTasks(new QueryFilter());
    Assertions.assertThat(auditTasks).hasSize(1);
    Assertions.assertThat(auditTasks.get(0).getPriority()).isEqualTo(newPriority);

    List<TaskEvent> events = taskAuditService.getAllTaskEvents(taskId, new QueryFilter());
    if (changeExpected) {
        // Event 0 is the creation; event 1 records the priority change and
        // mentions both old and new values in its message.
        Assertions.assertThat(events).hasSize(2);
        Assertions.assertThat(events.get(1).getMessage()).contains(String.valueOf(oldPriority),
                String.valueOf(newPriority));
    } else {
        Assertions.assertThat(events).hasSize(1);
    }
}
// Scenarios for testPriorityUpdate(old, new, changeExpected).
@Test
public void testPriorityUpdateSame() {
// Same priority value: no change event expected.
testPriorityUpdate(0, 0, false);
}
@Test
public void testPriorityUpdateDifferent() {
testPriorityUpdate(0, 10, true);
}
private void testDueDateUpdate(Date oldDate, Date newDate, boolean changeExpected) {
    // Create a task with the old due date, update the expiration, then verify the
    // live task, the audit entry, and (when the value differed) the change event.
    Task task = new TaskFluent()
            .setDueDate(oldDate)
            .setAdminUser("Administrator")
            .getTask();
    taskService.addTask(task, new HashMap<String, Object>());
    long taskId = task.getId();

    taskService.setExpirationDate(taskId, newDate);
    task = taskService.getTaskById(taskId);
    Assertions.assertThat(task.getTaskData().getExpirationTime()).isEqualTo(newDate);

    List<AuditTask> auditTasks = taskAuditService.getAllAuditTasks(new QueryFilter());
    Assertions.assertThat(auditTasks).hasSize(1);
    Assertions.assertThat(auditTasks.get(0).getDueDate()).isEqualTo(newDate);

    List<TaskEvent> events = taskAuditService.getAllTaskEvents(taskId, new QueryFilter());
    if (changeExpected) {
        // Event 0 is the creation; event 1 records the due-date change and
        // mentions both old and new values in its message.
        Assertions.assertThat(events).hasSize(2);
        Assertions.assertThat(events.get(1).getMessage()).contains(String.valueOf(oldDate),
                String.valueOf(newDate));
    } else {
        Assertions.assertThat(events).hasSize(1);
    }
}
/**
 * @return the current moment as a {@link Timestamp}
 */
private Timestamp getToday() {
    return new Timestamp(System.currentTimeMillis());
}
/**
 * @return a {@link Timestamp} exactly one calendar day after {@link #getToday()}
 */
private Timestamp getTomorrow() {
    Calendar tomorrow = Calendar.getInstance();
    tomorrow.setTime(getToday());
    tomorrow.add(Calendar.DATE, 1);
    return new Timestamp(tomorrow.getTimeInMillis());
}
// Scenarios for testDueDateUpdate(old, new, changeExpected): a change event is
// expected whenever the due date actually changes, including to/from null.
@Test
public void testDueDateUpdateSame() {
// Reuse the exact same Timestamp instance: no change event expected.
final Timestamp today = getToday();
testDueDateUpdate(today, today, false);
}
@Test
public void testDueDateUpdateDifferent() {
testDueDateUpdate(getToday(), getTomorrow(), true);
}
@Test
public void testDueDateUpdateFromNull() {
testDueDateUpdate(null, getTomorrow(), true);
}
@Test
public void testDueDateUpdateToNull() {
testDueDateUpdate(getToday(), null, true);
}
@Test
public void testVariableIndexInputAndOutput() {
    // Verify that task input variables are indexed at creation time and that
    // output variables are indexed when the task completes.
    Task task = new TaskFluent().setName("This is my task name")
            .addPotentialGroup("Knights Templer")
            .setAdminUser("Administrator")
            .getTask();
    Map<String, Object> inputs = new HashMap<String, Object>();
    inputs.put("firstVariable", "string content");
    inputs.put("number", 1234);
    taskService.addTask(task, inputs);
    long taskId = task.getId();

    List<TaskSummary> potentialOwnerTasks = taskService.getTasksAssignedAsPotentialOwner("salaboy", null, null, null);
    assertEquals(1, potentialOwnerTasks.size());
    assertTrue("Ready".equals(potentialOwnerTasks.get(0).getStatusId()));

    // Both inputs must be indexed, with values stored as strings.
    List<TaskVariable> indexedInputs = taskAuditService.taskVariableQuery()
            .taskId(taskId).intersect().type(VariableType.INPUT).build().getResultList();
    assertNotNull(indexedInputs);
    assertEquals(2, indexedInputs.size());
    Map<String, String> inputByName = collectVariableNameAndValue(indexedInputs);
    assertTrue(inputByName.containsKey("firstVariable"));
    assertTrue(inputByName.containsKey("number"));
    assertEquals("string content", inputByName.get("firstVariable"));
    assertEquals("1234", inputByName.get("number"));

    // Claim and start the task.
    taskService.claim(taskId, "Darth Vader");
    potentialOwnerTasks = taskService.getTasksAssignedAsPotentialOwner("salaboy", null, null, null);
    assertEquals(0, potentialOwnerTasks.size());
    potentialOwnerTasks = taskService.getTasksAssignedAsPotentialOwner("Darth Vader", null, null, null);
    assertEquals(1, potentialOwnerTasks.size());
    assertTrue("Reserved".equals(potentialOwnerTasks.get(0).getStatusId()));
    taskService.start(taskId, "Darth Vader");
    Task started = taskService.getTaskById(taskId);
    assertEquals(Status.InProgress, started.getTaskData().getStatus());
    assertEquals("Darth Vader", started.getTaskData().getActualOwner().getId());

    // Complete the task with two output variables.
    Map<String, Object> outputs = new HashMap<String, Object>();
    outputs.put("reply", "updated content");
    outputs.put("age", 25);
    taskService.complete(taskId, "Darth Vader", outputs);

    // Both outputs must be indexed after completion.
    List<TaskVariable> indexedOutputs = taskAuditService.taskVariableQuery()
            .taskId(taskId).intersect().type(VariableType.OUTPUT).build().getResultList();
    assertNotNull(indexedOutputs);
    assertEquals(2, indexedOutputs.size());
    Map<String, String> outputByName = collectVariableNameAndValue(indexedOutputs);
    assertTrue(outputByName.containsKey("reply"));
    assertTrue(outputByName.containsKey("age"));
    assertEquals("updated content", outputByName.get("reply"));
    assertEquals("25", outputByName.get("age"));
}
@Test
// Exercises the variable indexer when task output is first written with
// addOutputContentFromUser() and then overwritten on complete(): the index
// must always reflect the latest output content.
public void testVariableIndexInputAndUpdateOutput() {
Task task = new TaskFluent().setName("This is my task name")
.addPotentialGroup("Knights Templer")
.setAdminUser("Administrator")
.getTask();
// Two input variables are indexed at task creation time.
Map<String, Object> inputVariables = new HashMap<String, Object>();
inputVariables.put("firstVariable", "string content");
inputVariables.put("number", 1234);
taskService.addTask(task, inputVariables);
long taskId = task.getId();
List<TaskSummary> allGroupAuditTasks = taskService.getTasksAssignedAsPotentialOwner("salaboy", null, null, null);
assertEquals(1, allGroupAuditTasks.size());
assertTrue(allGroupAuditTasks.get(0).getStatusId().equals("Ready"));
// Indexed input values are stored as their string representation.
List<TaskVariable> inputVars = taskAuditService.taskVariableQuery()
.taskId(taskId).intersect().type(VariableType.INPUT).build().getResultList();
assertNotNull(inputVars);
assertEquals(2, inputVars.size());
Map<String, String> vars = collectVariableNameAndValue(inputVars);
assertTrue(vars.containsKey("firstVariable"));
assertTrue(vars.containsKey("number"));
assertEquals("string content", vars.get("firstVariable"));
assertEquals("1234", vars.get("number"));
// Claim hides the task from other group members and reserves it.
taskService.claim(taskId, "Darth Vader");
allGroupAuditTasks = taskService.getTasksAssignedAsPotentialOwner("salaboy", null, null, null);
assertEquals(0, allGroupAuditTasks.size());
allGroupAuditTasks = taskService.getTasksAssignedAsPotentialOwner("Darth Vader", null, null, null);
assertEquals(1, allGroupAuditTasks.size());
assertTrue(allGroupAuditTasks.get(0).getStatusId().equals("Reserved"));
taskService.start(taskId, "Darth Vader");
Task task1 = taskService.getTaskById(taskId);
assertEquals(Status.InProgress, task1.getTaskData().getStatus());
assertEquals("Darth Vader", task1.getTaskData().getActualOwner().getId());
// update task output
Map<String, Object> outputVariables = new HashMap<String, Object>();
outputVariables.put("reply", "updated content");
outputVariables.put("age", 25);
taskService.addOutputContentFromUser(taskId, "Darth Vader", outputVariables);
// The two written outputs must be indexed immediately, before completion.
List<TaskVariable> outputVars = taskAuditService.taskVariableQuery()
.taskId(taskId).intersect().type(VariableType.OUTPUT).build().getResultList();
assertNotNull(outputVars);
assertEquals(2, outputVars.size());
Map<String, String> outvars = collectVariableNameAndValue(outputVars);
assertTrue(outvars.containsKey("reply"));
assertTrue(outvars.containsKey("age"));
assertEquals("updated content", outvars.get("reply"));
assertEquals("25", outvars.get("age"));
// Check is Complete
// Completing with a new output map replaces the previously indexed values
// and adds the extra "reason" entry (3 output variables in total).
outputVariables = new HashMap<String, Object>();
outputVariables.put("reply", "completed content");
outputVariables.put("age", 44);
outputVariables.put("reason", "rework, please");
taskService.complete(taskId, "Darth Vader", outputVariables);
outputVars = taskAuditService.taskVariableQuery()
.taskId(taskId).intersect().type(VariableType.OUTPUT).build().getResultList();
assertNotNull(outputVars);
assertEquals(3, outputVars.size());
outvars = collectVariableNameAndValue(outputVars);
assertTrue(outvars.containsKey("reply"));
assertTrue(outvars.containsKey("age"));
assertTrue(outvars.containsKey("reason"));
assertEquals("completed content", outvars.get("reply"));
assertEquals("44", outvars.get("age"));
assertEquals("rework, please", outvars.get("reason"));
}
@Test
public void testVariableIndexInputAndOutputWithCustomIdexer() {
    // A custom variable indexer flattens the Person object into separate
    // "person.name" / "person.age" entries, for both inputs and outputs.
    Task task = new TaskFluent().setName("This is my task name")
            .addPotentialGroup("Knights Templer")
            .setAdminUser("Administrator")
            .getTask();
    Map<String, Object> inputVariables = new HashMap<String, Object>();
    inputVariables.put("firstVariable", "string content");
    inputVariables.put("person", new Person("john", 25));
    taskService.addTask(task, inputVariables);
    long taskId = task.getId();

    List<TaskSummary> allGroupAuditTasks = taskService.getTasksAssignedAsPotentialOwner("salaboy", null, null, null);
    assertEquals(1, allGroupAuditTasks.size());
    assertTrue(allGroupAuditTasks.get(0).getStatusId().equals("Ready"));

    // 3 indexed inputs: the plain string plus the two flattened Person fields.
    List<TaskVariable> inputVars = taskAuditService.taskVariableQuery()
            .taskId(taskId).intersect().type(VariableType.INPUT).build().getResultList();
    assertNotNull(inputVars);
    assertEquals(3, inputVars.size());
    Map<String, String> vars = collectVariableNameAndValue(inputVars);
    assertTrue(vars.containsKey("firstVariable"));
    assertTrue(vars.containsKey("person.name"));
    assertTrue(vars.containsKey("person.age"));
    assertEquals("string content", vars.get("firstVariable"));
    assertEquals("john", vars.get("person.name"));
    assertEquals("25", vars.get("person.age"));

    // Claim and start the task.
    taskService.claim(taskId, "Darth Vader");
    allGroupAuditTasks = taskService.getTasksAssignedAsPotentialOwner("salaboy", null, null, null);
    assertEquals(0, allGroupAuditTasks.size());
    allGroupAuditTasks = taskService.getTasksAssignedAsPotentialOwner("Darth Vader", null, null, null);
    assertEquals(1, allGroupAuditTasks.size());
    assertTrue(allGroupAuditTasks.get(0).getStatusId().equals("Reserved"));
    taskService.start(taskId, "Darth Vader");
    Task task1 = taskService.getTaskById(taskId);
    assertEquals(Status.InProgress, task1.getTaskData().getStatus());
    assertEquals("Darth Vader", task1.getTaskData().getActualOwner().getId());

    // Complete with an output Person, flattened the same way.
    Map<String, Object> outputVariables = new HashMap<String, Object>();
    outputVariables.put("reply", "updated content");
    outputVariables.put("person", new Person("mary", 28));
    taskService.complete(taskId, "Darth Vader", outputVariables);

    List<TaskVariable> outputVars = taskAuditService.taskVariableQuery()
            .taskId(taskId).intersect().type(VariableType.OUTPUT).build().getResultList();
    assertNotNull(outputVars);
    assertEquals(3, outputVars.size());
    Map<String, String> outvars = collectVariableNameAndValue(outputVars);
    assertTrue(outvars.containsKey("reply"));
    // BUGFIX: these two containment checks previously tested the INPUT map
    // ("vars") by mistake; they must validate the indexed OUTPUT variables.
    assertTrue(outvars.containsKey("person.name"));
    assertTrue(outvars.containsKey("person.age"));
    assertEquals("updated content", outvars.get("reply"));
    assertEquals("mary", outvars.get("person.name"));
    assertEquals("28", outvars.get("person.age"));
}
@Test
public void testSearchTasksByVariable() {
    // Verify task-summary queries by variable name for authorized users,
    // unauthorized users, missing variables, and regex (wildcard) names.
    Task task = new TaskFluent().setName("This is my task name")
            .addPotentialGroup("Knights Templer")
            .setAdminUser("Administrator")
            .getTask();
    Map<String, Object> inputVariables = new HashMap<String, Object>();
    inputVariables.put("firstVariable", "string content");
    inputVariables.put("number", 1234);
    taskService.addTask(task, inputVariables);
    long taskId = task.getId();

    List<TaskSummary> allGroupAuditTasks = taskService.getTasksAssignedAsPotentialOwner("salaboy", null, null, null);
    assertEquals(1, allGroupAuditTasks.size());
    assertTrue(allGroupAuditTasks.get(0).getStatusId().equals("Ready"));

    // Sanity check: both inputs were indexed with string values.
    List<TaskVariable> inputVars = taskAuditService.taskVariableQuery()
            .taskId(taskId).intersect().type(VariableType.INPUT).build().getResultList();
    assertNotNull(inputVars);
    assertEquals(2, inputVars.size());
    Map<String, String> vars = collectVariableNameAndValue(inputVars);
    assertTrue(vars.containsKey("firstVariable"));
    assertTrue(vars.containsKey("number"));
    assertEquals("string content", vars.get("firstVariable"));
    assertEquals("1234", vars.get("number"));

    // Authorized user, existing variable -> one task found.
    List<TaskSummary> tasksByVariable = taskService.taskSummaryQuery("salaboy")
            .variableName("firstVariable").build().getResultList();
    assertNotNull(tasksByVariable);
    assertEquals(1, tasksByVariable.size());

    // Unauthorized user -> nothing, even for an existing variable.
    // BUGFIX: used to query the misspelled "fistVariable", so the empty result
    // proved nothing about authorization.
    tasksByVariable = taskService.taskSummaryQuery("WinterMute")
            .variableName("firstVariable").build().getResultList();
    assertNotNull(tasksByVariable);
    assertEquals(0, tasksByVariable.size());

    // Non-existing variable -> nothing.
    tasksByVariable = taskService.taskSummaryQuery("salaboy")
            .variableName("notexistingVariable").build().getResultList();
    assertNotNull(tasksByVariable);
    assertEquals(0, tasksByVariable.size());

    // Wildcard (regex) variable name -> one task found.
    // (A duplicated assertNotNull was also removed here.)
    tasksByVariable = taskService.taskSummaryQuery("salaboy").regex()
            .variableName("first*").build().getResultList();
    assertNotNull(tasksByVariable);
    assertEquals(1, tasksByVariable.size());
}
@Test
// Exercises task-summary queries that combine variable name and value,
// including regex matching, unauthorized users, and non-matching values.
public void testSearchTasksByVariableNameAndValue() {
Task task = new TaskFluent().setName("This is my task name")
.addPotentialGroup("Knights Templer")
.setAdminUser("Administrator")
.getTask();
Map<String, Object> inputVariables = new HashMap<String, Object>();
String userId = "salaboy";
String varName = "firstVariable";
String varValue = "string content";
inputVariables.put(varName, varValue);
inputVariables.put("number", 1234);
taskService.addTask(task, inputVariables);
long taskId = task.getId();
List<TaskSummary> allGroupAuditTasks = taskService.getTasksAssignedAsPotentialOwner(userId, null, null, null);
assertEquals(1, allGroupAuditTasks.size());
assertTrue(allGroupAuditTasks.get(0).getStatusId().equals("Ready"));
// Sanity check: both input variables were indexed with string values.
List<TaskVariable> inputVars = taskAuditService.taskVariableQuery()
.taskId(taskId).build().getResultList();
assertNotNull(inputVars);
assertEquals(2, inputVars.size());
Map<String, String> vars = collectVariableNameAndValue(inputVars);
assertTrue(vars.containsKey(varName));
assertTrue(vars.containsKey("number"));
assertEquals(varValue, vars.get(varName));
assertEquals("1234", vars.get("number"));
// Exact name AND exact value -> one task found.
List<TaskSummary> tasksByVariable = taskService.taskSummaryQuery(userId)
.variableName(varName).and().variableValue(varValue).build().getResultList();
assertNotNull(tasksByVariable);
assertEquals(1, tasksByVariable.size());
// search with value wild card
tasksByVariable = taskService.taskSummaryQuery(userId)
.variableName(varName).and().regex().variableValue("string*").build().getResultList();
assertNotNull(tasksByVariable);
assertEquals(1, tasksByVariable.size());
//search with name and value wild card
tasksByVariable = taskService.taskSummaryQuery(userId)
.regex().variableName("first*").and().variableValue("string*").build().getResultList();
assertNotNull(tasksByVariable);
assertEquals(1, tasksByVariable.size());
// search with unauthorized user
tasksByVariable = taskService.taskSummaryQuery("WinterMute")
.regex().variableName(varName).and().variableValue(varValue).build().getResultList();
assertNotNull(tasksByVariable);
assertEquals(0, tasksByVariable.size());
// search with non existing variable
tasksByVariable = taskService.taskSummaryQuery(userId)
.regex().variableName("nonexistingvariable").and().variableValue(varValue).build().getResultList();
assertNotNull(tasksByVariable);
assertEquals(0, tasksByVariable.size());
// search with not matching value
tasksByVariable = taskService.taskSummaryQuery(userId)
.regex().variableName(varName).and().variableValue("updated content").build().getResultList();
assertNotNull(tasksByVariable);
assertEquals(0, tasksByVariable.size());
}
/**
 * Verifies that task input and output variables are indexed for querying, and
 * that a long (~480 character) output value is stored in full when no trim
 * limit is configured.
 */
@Test
public void testVariableIndexInputAndOutputWitlLongText() {
    // Group task: it stays in Ready state until a user claims it.
    Task task = new TaskFluent().setName("This is my task name")
            .addPotentialGroup("Knights Templer")
            .setAdminUser("Administrator")
            .getTask();
    Map<String, Object> inputVariables = new HashMap<String, Object>();
    inputVariables.put("firstVariable", "string content");
    inputVariables.put("number", 1234);
    taskService.addTask(task, inputVariables);
    long taskId = task.getId();

    List<TaskSummary> allGroupAuditTasks = taskService.getTasksAssignedAsPotentialOwner("salaboy", null, null, null);
    assertEquals(1, allGroupAuditTasks.size());
    assertTrue(allGroupAuditTasks.get(0).getStatusId().equals("Ready"));

    // Both input variables must have been indexed when the task was added.
    List<TaskVariable> inputVars = taskAuditService.taskVariableQuery()
            .taskId(taskId).intersect().type(VariableType.INPUT).build().getResultList();
    assertNotNull(inputVars);
    assertEquals(2, inputVars.size());
    Map<String, String> vars = collectVariableNameAndValue(inputVars);
    assertTrue(vars.containsKey("firstVariable"));
    assertTrue(vars.containsKey("number"));
    assertEquals("string content", vars.get("firstVariable"));
    assertEquals("1234", vars.get("number")); // variable values are indexed as strings

    taskService.claim(taskId, "Darth Vader");
    // After the claim the task leaves the group queue and belongs to the claimer.
    allGroupAuditTasks = taskService.getTasksAssignedAsPotentialOwner("salaboy", null, null, null);
    assertEquals(0, allGroupAuditTasks.size());
    allGroupAuditTasks = taskService.getTasksAssignedAsPotentialOwner("Darth Vader", null, null, null);
    assertEquals(1, allGroupAuditTasks.size());
    assertTrue(allGroupAuditTasks.get(0).getStatusId().equals("Reserved"));

    taskService.start(taskId, "Darth Vader");
    Task task1 = taskService.getTaskById(taskId);
    assertEquals(Status.InProgress, task1.getTaskData().getStatus());
    assertEquals("Darth Vader", task1.getTaskData().getActualOwner().getId());

    // Build an output value 16x the 30-char base string to exercise long-value indexing.
    String reply = "Just a short part of the reply";
    String veryLongReply = reply;
    for (int i = 0; i < 15; i++) {
        veryLongReply += reply;
    }
    Map<String, Object> outputVariables = new HashMap<String, Object>();
    outputVariables.put("reply", veryLongReply);
    outputVariables.put("age", 25);
    // Check is Complete
    taskService.complete(taskId, "Darth Vader", outputVariables);

    // Output variables are indexed on completion; the long value is kept untruncated.
    List<TaskVariable> outputVars = taskAuditService.taskVariableQuery()
            .taskId(taskId).intersect().type(VariableType.OUTPUT).build().getResultList();
    assertNotNull(outputVars);
    assertEquals(2, outputVars.size());
    Map<String, String> outvars = collectVariableNameAndValue(outputVars);
    assertTrue(outvars.containsKey("reply"));
    assertTrue(outvars.containsKey("age"));
    assertEquals(veryLongReply, outvars.get("reply"));
    assertEquals("25", outvars.get("age"));
}
/**
 * Same life cycle as the untrimmed variant, but with the
 * {@code org.jbpm.task.var.log.length} system property set to 10 so indexed
 * variable values longer than 10 characters are truncated before storage.
 */
@Test
public void testVariableIndexInputAndOutputWitlLongTextTrimmed() {
    // Limit indexed variable values to 10 characters for the duration of this test.
    System.setProperty("org.jbpm.task.var.log.length", "10");
    try {
        Task task = new TaskFluent().setName("This is my task name")
                .addPotentialGroup("Knights Templer")
                .setAdminUser("Administrator")
                .getTask();
        Map<String, Object> inputVariables = new HashMap<String, Object>();
        inputVariables.put("firstVariable", "string content");
        inputVariables.put("number", 1234);
        taskService.addTask(task, inputVariables);
        long taskId = task.getId();

        List<TaskSummary> allGroupAuditTasks = taskService.getTasksAssignedAsPotentialOwner("salaboy", null, null, null);
        assertEquals(1, allGroupAuditTasks.size());
        assertTrue(allGroupAuditTasks.get(0).getStatusId().equals("Ready"));

        List<TaskVariable> inputVars = taskAuditService.taskVariableQuery()
                .taskId(taskId).intersect().type(VariableType.INPUT).build().getResultList();
        assertNotNull(inputVars);
        assertEquals(2, inputVars.size());
        Map<String, String> vars = collectVariableNameAndValue(inputVars);
        assertTrue(vars.containsKey("firstVariable"));
        assertTrue(vars.containsKey("number"));
        // the variable was longer that 10 so it had to be trimmed
        assertEquals("string con", vars.get("firstVariable"));
        assertEquals("1234", vars.get("number"));

        taskService.claim(taskId, "Darth Vader");
        // Claimed task leaves the group queue and is reserved for the claimer.
        allGroupAuditTasks = taskService.getTasksAssignedAsPotentialOwner("salaboy", null, null, null);
        assertEquals(0, allGroupAuditTasks.size());
        allGroupAuditTasks = taskService.getTasksAssignedAsPotentialOwner("Darth Vader", null, null, null);
        assertEquals(1, allGroupAuditTasks.size());
        assertTrue(allGroupAuditTasks.get(0).getStatusId().equals("Reserved"));

        taskService.start(taskId, "Darth Vader");
        Task task1 = taskService.getTaskById(taskId);
        assertEquals(Status.InProgress, task1.getTaskData().getStatus());
        assertEquals("Darth Vader", task1.getTaskData().getActualOwner().getId());

        // Long output value to be trimmed to the configured 10 characters.
        String reply = "Just a short part of the reply";
        String veryLongReply = reply;
        for (int i = 0; i < 15; i++) {
            veryLongReply += reply;
        }
        Map<String, Object> outputVariables = new HashMap<String, Object>();
        outputVariables.put("reply", veryLongReply);
        outputVariables.put("age", 25);
        // Check is Complete
        taskService.complete(taskId, "Darth Vader", outputVariables);

        List<TaskVariable> outputVars = taskAuditService.taskVariableQuery()
                .taskId(taskId).intersect().type(VariableType.OUTPUT).build().getResultList();
        assertNotNull(outputVars);
        assertEquals(2, outputVars.size());
        Map<String, String> outvars = collectVariableNameAndValue(outputVars);
        assertTrue(outvars.containsKey("reply"));
        assertTrue(outvars.containsKey("age"));
        // Only the first 10 characters of the long reply survive indexing.
        assertEquals("Just a sho", outvars.get("reply"));
        assertEquals("25", outvars.get("age"));
    } finally {
        // Always restore the default so other tests are unaffected.
        System.clearProperty("org.jbpm.task.var.log.length");
    }
}
/**
 * Walks a task through its full life cycle (Ready -> Reserved -> Ready ->
 * Reserved -> InProgress -> Reserved -> InProgress -> Completed, covering
 * release and stop transitions) and after every step checks that both the
 * audit view and the BAM summary report the same status.
 */
@Test
public void testLifeCycleWithBAM() {
    Task task = new TaskFluent().setName("This is my task name")
            .addPotentialGroup("Knights Templer")
            .setAdminUser("Administrator")
            .getTask();
    taskService.addTask(task, new HashMap<String, Object>());
    long taskId = task.getId();

    List<AuditTask> allGroupAuditTasks = taskAuditService.getAllGroupAuditTasksByUser("Knights Templer", new QueryFilter());
    assertEquals(1, allGroupAuditTasks.size());
    assertTrue(allGroupAuditTasks.get(0).getStatus().equals("Ready"));
    assertBAMTask(taskId, "Ready");

    taskService.claim(taskId, "Darth Vader");
    allGroupAuditTasks = taskAuditService.getAllAuditTasksByUser("Darth Vader", new QueryFilter());
    assertEquals(1, allGroupAuditTasks.size());
    assertEquals("Reserved", allGroupAuditTasks.get(0).getStatus());
    assertBAMTask(taskId, "Reserved");

    // Releasing puts the task back into the group queue.
    taskService.release(taskId, "Darth Vader");
    allGroupAuditTasks = taskAuditService.getAllGroupAuditTasksByUser("Knights Templer", new QueryFilter());
    assertEquals(1, allGroupAuditTasks.size());
    assertTrue(allGroupAuditTasks.get(0).getStatus().equals("Ready"));
    assertBAMTask(taskId, "Ready");

    taskService.claim(taskId, "Darth Vader");
    allGroupAuditTasks = taskAuditService.getAllAuditTasksByUser("Darth Vader", new QueryFilter());
    assertEquals(1, allGroupAuditTasks.size());
    assertEquals("Reserved", allGroupAuditTasks.get(0).getStatus());
    assertBAMTask(taskId, "Reserved");

    // Go straight from Ready to Inprogress
    taskService.start(taskId, "Darth Vader");
    allGroupAuditTasks = taskAuditService.getAllAuditTasksByUser("Darth Vader", new QueryFilter());
    assertEquals(1, allGroupAuditTasks.size());
    assertEquals("InProgress", allGroupAuditTasks.get(0).getStatus());
    assertBAMTask(taskId, "InProgress");

    // Stopping an in-progress task returns it to Reserved for the same owner.
    taskService.stop(taskId, "Darth Vader");
    allGroupAuditTasks = taskAuditService.getAllAuditTasksByUser("Darth Vader", new QueryFilter());
    assertEquals(1, allGroupAuditTasks.size());
    assertEquals("Reserved", allGroupAuditTasks.get(0).getStatus());
    assertBAMTask(taskId, "Reserved");

    taskService.start(taskId, "Darth Vader");
    allGroupAuditTasks = taskAuditService.getAllAuditTasksByUser("Darth Vader", new QueryFilter());
    assertEquals(1, allGroupAuditTasks.size());
    assertEquals("InProgress", allGroupAuditTasks.get(0).getStatus());
    assertBAMTask(taskId, "InProgress");

    // Check is Complete
    taskService.complete(taskId, "Darth Vader", null);
    allGroupAuditTasks = taskAuditService.getAllAuditTasksByUser("Darth Vader", new QueryFilter());
    assertEquals(1, allGroupAuditTasks.size());
    assertEquals("Completed", allGroupAuditTasks.get(0).getStatus());
    assertBAMTask(taskId, "Completed");
}
/**
 * Flattens a list of task variables into a name-to-value map so tests can
 * assert on individual variables without scanning the list.
 *
 * @param variables indexed task variables returned by a variable query
 * @return map keyed by variable name holding each variable's string value
 */
protected Map<String, String> collectVariableNameAndValue(List<TaskVariable> variables) {
    Map<String, String> byName = new HashMap<String, String>();
    for (TaskVariable variable : variables) {
        byName.put(variable.getName(), variable.getValue());
    }
    return byName;
}
/**
 * Asserts that exactly one BAM summary row exists for the given task and that
 * it carries the expected status.
 *
 * @param taskId         id of the task whose BAM summary is verified
 * @param expectedStatus status string the BAM record must hold
 */
protected void assertBAMTask(long taskId, String expectedStatus) {
    EntityManager em = getEntityManager();
    try {
        // getSingleResult throws unless exactly one matching row exists.
        BAMTaskSummaryImpl task = (BAMTaskSummaryImpl) em.createQuery(
                "select bt from BAMTaskSummaryImpl bt where bt.taskId = :taskId")
                .setParameter("taskId", taskId)
                .getSingleResult();
        assertNotNull(task);
        assertEquals(taskId, task.getTaskId());
        assertEquals(expectedStatus, task.getStatus());
    } finally {
        // Close even when an assertion fails, so the entity manager never leaks.
        em.close();
    }
}
/** Supplies the JPA entity manager used to inspect audit/BAM tables directly. */
protected abstract EntityManager getEntityManager();
}
| apache-2.0 |
apache/geronimo | testsuite/aries-testsuite/osgi-service-custom/custom-eba/src/test/java/org/apache/geronimo/testsuite/aries/custom/test/CustomOSGIServiceTest.java | 2264 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.testsuite.aries.custom.test;
import org.apache.geronimo.testsupport.TestSupport;
import org.testng.annotations.Test;
import org.apache.geronimo.testsupport.HttpUtils;
import java.net.URL;
import java.io.IOException;
/**
* Unit test for simple App.
*/
public class CustomOSGIServiceTest extends TestSupport{
private String baseURL = "http://localhost:8080/";
@Test
public void testServlet() throws Exception {
checkReply("/CustomServlet");
}
private void checkReply(String address)
throws Exception {
String warName = System.getProperty("webAppName");
assertNotNull(warName);
URL url = new URL(baseURL + warName + address);
String reply = doGET(url, 6, 10 * 1000);
assertTrue("Custom OSGI Service",
reply.contains("Hello!"));
}
private String doGET(URL url, int repeat, long delay) {
for (int i = 0; i < repeat; i++) {
try {
return HttpUtils.doGET(url);
} catch (IOException e) {
// ignore
try {
Thread.sleep(delay);
} catch (Exception ee) {
break;
}
}
}
fail("Did not get servlet response in time");
return "";
}
} | apache-2.0 |
ebondareva/bootique | bootique/src/test/java/io/bootique/jackson/InstantDeserializerIT.java | 1383 | /*
* Licensed to ObjectStyle LLC under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ObjectStyle LLC licenses
* this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.bootique.jackson;
import org.junit.Test;
import java.time.Instant;
import static org.junit.Assert.assertEquals;
public class InstantDeserializerIT extends DeserializerTestBase {

    /**
     * An ISO-8601 timestamp in YAML must bind to an {@link Instant} property.
     */
    @Test
    public void testDeserialize() throws Exception {
        Bean bean = deserialize(Bean.class, "instant: \"2018-04-05T12:34:42.212Z\"");
        Instant expected = Instant.ofEpochMilli(1522931682212L);
        assertEquals(expected, bean.instant);
    }

    /** Target bean with a single writable {@code instant} property. */
    static class Bean {

        protected Instant instant;

        public void setInstant(Instant instant) {
            this.instant = instant;
        }
    }
}
| apache-2.0 |
yanzhijun/jclouds-aliyun | scriptbuilder/src/main/java/org/jclouds/scriptbuilder/statements/ssh/InstallRSAPrivateKey.java | 2494 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jclouds.scriptbuilder.statements.ssh;
import static com.google.common.base.Preconditions.checkNotNull;
import static org.jclouds.scriptbuilder.domain.Statements.appendFile;
import static org.jclouds.scriptbuilder.domain.Statements.exec;
import org.jclouds.scriptbuilder.domain.OsFamily;
import org.jclouds.scriptbuilder.domain.Statement;
import org.jclouds.scriptbuilder.domain.StatementList;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableList.Builder;
/**
 * Statement that installs an RSA private key as {@code id_rsa} inside the
 * user's SSH directory, creating the directory and locking the file down to
 * owner-only permissions (chmod 600).
 */
public class InstallRSAPrivateKey implements Statement {

   private final String sshDir;
   private final String privateKey;

   /** Installs into the default {@code ~/.ssh} directory. */
   public InstallRSAPrivateKey(String privateKey) {
      this("~/.ssh", privateKey);
   }

   public InstallRSAPrivateKey(String sshDir, String privateKey) {
      this.sshDir = checkNotNull(sshDir, "sshDir");
      this.privateKey = checkNotNull(privateKey, "privateKey");
   }

   @Override
   public Iterable<String> functionDependencies(OsFamily family) {
      // No shell function library is required by the generated script.
      return ImmutableList.of();
   }

   @Override
   public String render(OsFamily family) {
      checkNotNull(family, "family");
      if (family == OsFamily.WINDOWS)
         throw new UnsupportedOperationException("windows not yet implemented");
      String idRsa = sshDir + "{fs}id_rsa";
      // Order matters: create the directory, clear any stale key, write the
      // new key line by line, then restrict permissions.
      Statement script = new StatementList(ImmutableList.of(
            exec("{md} " + sshDir),
            exec("{rm} " + idRsa),
            appendFile(idRsa, Splitter.on('\n').split(privateKey)),
            exec("chmod 600 " + idRsa)));
      return script.render(family);
   }
}
| apache-2.0 |
grooviter/asteroid | asteroid-core/src/main/java/asteroid/transformer/AbstractTransformer.java | 1768 | package asteroid.transformer;
import org.codehaus.groovy.control.SourceUnit;
import org.codehaus.groovy.ast.ModuleNode;
import org.codehaus.groovy.ast.ClassCodeExpressionTransformer;
/**
* Most transformers need at some point the source unit in order to
* fix or apply properly the scope to each variable.
*
* This class enforces the use of a SourceUnit instance for every
* transformer
*
* @since 0.2.0
*
*/
public abstract class AbstractTransformer
    extends ClassCodeExpressionTransformer implements Transformer {

    private final SourceUnit sourceUnit;

    /**
     * Creates a transformer bound to the source unit whose expressions it
     * will rewrite.
     *
     * @param sourceUnit the source unit the transformed expressions belong to
     * @since 0.2.0
     */
    public AbstractTransformer(final SourceUnit sourceUnit) {
        this.sourceUnit = sourceUnit;
    }

    /**
     * Returns the source unit this transformer was created with.
     *
     * @return the bound {@link SourceUnit}
     * @since 0.2.0
     */
    public SourceUnit getSourceUnit() {
        return this.sourceUnit;
    }

    /**
     * Returns the AST module of the bound source unit.
     *
     * @return a {@link ModuleNode} instance
     * @since 0.2.0
     */
    public ModuleNode getModule() {
        return sourceUnit.getAST();
    }

    /**
     * Returns the qualified package name of the current module, or
     * {@code null} when the module is absent or declares no package.
     *
     * @return the package name, or {@code null}
     * @since 0.2.0
     */
    public String getModulePackageName() {
        final ModuleNode module = getModule();
        return module == null ? null : module.getPackageName();
    }
}
| apache-2.0 |
sardine/mina-ja | src/mina-core/src/test/java/org/apache/mina/util/CircularQueueTest.java | 7058 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.mina.util;
import junit.framework.TestCase;
import java.util.Iterator;
/**
* Tests {@link org.apache.mina.util.CircularQueue}
*
* @author <a href="http://mina.apache.org">Apache MINA Project</a>
*/
public class CircularQueueTest extends TestCase {
private volatile int pushCount;
private volatile int popCount;
public void setUp() {
pushCount = 0;
popCount = 0;
}
public void testRotation() {
CircularQueue<Integer> q = new CircularQueue<Integer>(); // DEFAULT_CAPACITY = 4
testRotation0(q);
}
public void testExpandingRotation() {
CircularQueue<Integer> q = new CircularQueue<Integer>(); // DEFAULT_CAPACITY = 4
for (int i = 0; i < 10; i++) {
testRotation0(q);
// make expansion happen
int oldCapacity = q.capacity();
for (int j = q.capacity(); j >= 0; j--) {
q.offer(new Integer(++pushCount));
}
assertTrue(q.capacity() > oldCapacity);
testRotation0(q);
}
}
private void testRotation0(CircularQueue<Integer> q) {
for (int i = 0; i < q.capacity() * 7 / 4; i++) {
q.offer(new Integer(++pushCount));
assertEquals(++popCount, q.poll().intValue());
}
}
public void testRandomAddOnQueue() {
CircularQueue<Integer> q = new CircularQueue<Integer>();
// Create a queue with 5 elements and capacity 8;
for (int i = 0; i < 5; i++) {
q.offer(new Integer(i));
}
q.add(0, new Integer(100));
q.add(3, new Integer(200));
q.add(7, new Integer(300));
Iterator<Integer> i = q.iterator();
assertEquals(8, q.size());
assertEquals(new Integer(100), i.next());
assertEquals(new Integer(0), i.next());
assertEquals(new Integer(1), i.next());
assertEquals(new Integer(200), i.next());
assertEquals(new Integer(2), i.next());
assertEquals(new Integer(3), i.next());
assertEquals(new Integer(4), i.next());
assertEquals(new Integer(300), i.next());
try {
i.next();
fail();
} catch (Exception e) {
// an exception signifies a successfull test case
assertTrue(true);
}
}
public void testRandomAddOnRotatedQueue() {
CircularQueue<Integer> q = getRotatedQueue();
q.add(0, new Integer(100)); // addFirst
q.add(2, new Integer(200));
q.add(4, new Integer(300));
q.add(10, new Integer(400));
q.add(12, new Integer(500)); // addLast
Iterator<Integer> i = q.iterator();
assertEquals(13, q.size());
assertEquals(new Integer(100), i.next());
assertEquals(new Integer(0), i.next());
assertEquals(new Integer(200), i.next());
assertEquals(new Integer(1), i.next());
assertEquals(new Integer(300), i.next());
assertEquals(new Integer(2), i.next());
assertEquals(new Integer(3), i.next());
assertEquals(new Integer(4), i.next());
assertEquals(new Integer(5), i.next());
assertEquals(new Integer(6), i.next());
assertEquals(new Integer(400), i.next());
assertEquals(new Integer(7), i.next());
assertEquals(new Integer(500), i.next());
try {
i.next();
fail();
} catch (Exception e) {
// an exception signifies a successfull test case
assertTrue(true);
}
}
public void testRandomRemoveOnQueue() {
CircularQueue<Integer> q = new CircularQueue<Integer>();
// Create a queue with 5 elements and capacity 8;
for (int i = 0; i < 5; i++) {
q.offer(new Integer(i));
}
q.remove(0);
q.remove(2);
q.remove(2);
Iterator<Integer> i = q.iterator();
assertEquals(2, q.size());
assertEquals(new Integer(1), i.next());
assertEquals(new Integer(2), i.next());
try {
i.next();
fail();
} catch (Exception e) {
// an exception signifies a successfull test case
assertTrue(true);
}
}
public void testRandomRemoveOnRotatedQueue() {
CircularQueue<Integer> q = getRotatedQueue();
q.remove(0); // removeFirst
q.remove(2); // removeLast in the first half
q.remove(2); // removeFirst in the first half
q.remove(4); // removeLast
Iterator<Integer> i = q.iterator();
assertEquals(4, q.size());
assertEquals(new Integer(1), i.next());
assertEquals(new Integer(2), i.next());
assertEquals(new Integer(5), i.next());
assertEquals(new Integer(6), i.next());
try {
i.next();
fail();
} catch (Exception e) {
// an exception signifies a successfull test case
assertTrue(true);
}
}
public void testExpandAndShrink() throws Exception {
CircularQueue<Integer> q = new CircularQueue<Integer>();
for (int i = 0; i < 1024; i ++) {
q.offer(i);
}
assertEquals(1024, q.capacity());
for (int i = 0; i < 512; i ++) {
q.offer(i);
q.poll();
}
assertEquals(2048, q.capacity());
for (int i = 0; i < 1024; i ++) {
q.poll();
}
assertEquals(4, q.capacity());
}
private CircularQueue<Integer> getRotatedQueue() {
CircularQueue<Integer> q = new CircularQueue<Integer>();
// Ensure capacity: 16
for (int i = 0; i < 16; i++) {
q.offer(new Integer(-1));
}
q.clear();
// Rotate it
for (int i = 0; i < 12; i++) {
q.offer(new Integer(-1));
q.poll();
}
// Now push items
for (int i = 0; i < 8; i++) {
q.offer(new Integer(i));
}
return q;
}
public static void main(String[] args) {
junit.textui.TestRunner.run(CircularQueueTest.class);
}
} | apache-2.0 |
xasx/assertj-core | src/test/java/org/assertj/core/api/charsequence/CharSequenceAssert_isNotEqualToNormalizingWhitespace_Test.java | 1388 | /*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* Copyright 2012-2019 the original author or authors.
*/
package org.assertj.core.api.charsequence;
import static org.mockito.Mockito.verify;
import org.assertj.core.api.CharSequenceAssert;
import org.assertj.core.api.CharSequenceAssertBaseTest;
/**
 * Tests for <code>{@link org.assertj.core.api.CharSequenceAssert#isNotEqualToNormalizingWhitespace(CharSequence)}</code>.
 *
 * @author Dan Corder
 */
public class CharSequenceAssert_isNotEqualToNormalizingWhitespace_Test extends CharSequenceAssertBaseTest {

  // Shared fixture so the invocation and its verification cannot drift apart.
  private static final String EXPECTED = " my foo bar ";

  @Override
  protected CharSequenceAssert invoke_api_method() {
    return assertions.isNotEqualToNormalizingWhitespace(EXPECTED);
  }

  @Override
  protected void verify_internal_effects() {
    verify(strings).assertNotEqualsNormalizingWhitespace(getInfo(assertions), getActual(assertions), EXPECTED);
  }
}
| apache-2.0 |
sll8192/wpan | app/src/main/java/com/xinyu/mwp/networkapi/Host.java | 1838 | package com.xinyu.mwp.networkapi;
import android.text.TextUtils;
/**
 * Server environments the application can point at, plus the mutable socket
 * endpoint used for the push connection.
 *
 * Created by wsz on 2016/4/27.
 */
public enum Host {
    INTERNAL_TEST("http://code.ywwl.com/modou"),
    EXTERNAL_TEST("http://tandroidapi.modou.com"),
    EXTERNAL_RELEASE("http://androidapi.modou.com");

    // Current base URL for this entry; mutable because switchHost() can
    // temporarily redirect any entry to the release URL.
    private String host;
    // URL remembered before switching to release, so it can be restored.
    private String oldHost;

    Host(String value) {
        this.host = value;
    }

    public String getValue() {
        return host;
    }

    /**
     * Toggles this entry between its own URL and the release URL. Switching
     * to release remembers the previous URL so a second call switches back.
     */
    public void switchHost() {
        if (EXTERNAL_RELEASE.getValue().equals(host)) {
            // Already on release: restore the remembered URL, if any.
            if (!TextUtils.isEmpty(oldHost)) {
                host = oldHost;
            }
        } else {
            oldHost = host;
            host = EXTERNAL_RELEASE.getValue();
        }
    }

    /** IM port for the given environment (release uses a dedicated port). */
    public static int getImPort(Host host) {
        return host == EXTERNAL_RELEASE ? 5432 : 5222;
    }

    /** IM hostname for the given environment. */
    public static String getImHost(Host host) {
        if (host == EXTERNAL_RELEASE) {
            return "im.modou.com";
        }
        return host == INTERNAL_TEST ? "code.ywwl.com" : "tim.modou.com";
    }

    // Mutable socket endpoint; note getSocketServerIp() currently ignores
    // serverIp and returns a fixed hostname.
    public static String serverIp = "139.224.34.22";
    public static short serverPort = 16205;

    public static String getSocketServerIp() {
        return "a.flight.dlgrme.com";
    }

    public static short getSocketServerPort() {
        return serverPort;
    }

    public static void setSocketServerIp(String serverIp) {
        Host.serverIp = serverIp;
    }

    public static void setSocketServerPort(short serverPort) {
        Host.serverPort = serverPort;
    }
}
| apache-2.0 |
impactcentre/ocrevalUAtion | src/test/java/eu/digitisation/layout/BoundingBoxTest.java | 1377 | /*
* Copyright (C) 2014 Universidad de Alicante
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package eu.digitisation.layout;
import java.awt.Polygon;
import static org.junit.Assert.assertEquals;
import org.junit.Test;
/**
 * Tests for {@link BoundingBox}.
 *
 * @author R.C.C
 */
public class BoundingBoxTest {

    /**
     * Merging two overlapping boxes must produce the same bounds as the
     * polygon of the combined box.
     */
    @Test
    public void testToPolygon() {
        System.out.println("toPolygon");
        BoundingBox merged = new BoundingBox(0, 0, 10, 20);
        merged.add(new BoundingBox(10, 10, 20, 20));
        Polygon expected = new BoundingBox(0, 0, 20, 20).asPolygon();
        assertEquals(expected.getBounds(), merged);
    }
}
| apache-2.0 |
fnp/pylucene | lucene-java-3.5.0/lucene/src/test/org/apache/lucene/index/TestNorms.java | 9369 | package org.apache.lucene.index;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.util.ArrayList;
import java.util.Random;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Field.Index;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.DefaultSimilarity;
import org.apache.lucene.search.Similarity;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;
/**
* Test that norms info is preserved during index life - including
* separate norms, addDocument, addIndexes, forceMerge.
*/
public class TestNorms extends LuceneTestCase {
// Similarity that ignores field length, so each norm equals only the boost
// assigned in newDoc() and can be verified exactly.
private class SimilarityOne extends DefaultSimilarity {
    @Override
    public float computeNorm(String fieldName, FieldInvertState state) {
        // Disable length norm
        return state.getBoost();
    }
}
// Number of fields written into every document (f0..f9).
private static final int NUM_FIELDS = 10;
private Similarity similarityOne;        // boost-only similarity used by all writers
private Analyzer anlzr;
private int numDocNorms;                 // total number of documents (and norms) created so far
private ArrayList<Float> norms;          // expected norm per document for unmodified fields
private ArrayList<Float> modifiedNorms;  // expected norm per document for field f1 (mutated via setNorm)
private float lastNorm = 0;              // last norm value handed out by nextNorm()
private float normDelta = (float) 0.001; // step used when searching for the next encodable norm
@Override
public void setUp() throws Exception {
    super.setUp();
    // Fresh similarity and analyzer for every test run.
    similarityOne = new SimilarityOne();
    anlzr = new MockAnalyzer(random);
}
/**
 * Test that norms values are preserved as the index is maintained.
 * Including separate norms.
 * Including merging indexes with separate norms.
 * Including forceMerge.
 */
public void testNorms() throws IOException {
    // Build and exercise a first index.
    Directory dir1 = newDirectory();
    norms = new ArrayList<Float>();
    modifiedNorms = new ArrayList<Float>();
    createIndex(random, dir1);
    doTestNorms(random, dir1);

    // test with a single index: index2
    // Stash the bookkeeping for index1, then start fresh for index2.
    ArrayList<Float> norms1 = norms;
    ArrayList<Float> modifiedNorms1 = modifiedNorms;
    int numDocNorms1 = numDocNorms;

    norms = new ArrayList<Float>();
    modifiedNorms = new ArrayList<Float>();
    numDocNorms = 0;

    Directory dir2 = newDirectory();
    createIndex(random, dir2);
    doTestNorms(random, dir2);

    // add index1 and index2 to a third index: index3
    Directory dir3 = newDirectory();
    createIndex(random, dir3);
    IndexWriter iw = new IndexWriter(dir3, newIndexWriterConfig(
        TEST_VERSION_CURRENT, anlzr).setOpenMode(OpenMode.APPEND)
        .setMaxBufferedDocs(5).setMergePolicy(newLogMergePolicy(3)));
    iw.addIndexes(new Directory[]{dir1,dir2});
    iw.forceMerge(1);
    iw.close();

    // Merge the expected-norms bookkeeping in the same order the indexes were added.
    norms1.addAll(norms);
    norms = norms1;
    modifiedNorms1.addAll(modifiedNorms);
    modifiedNorms = modifiedNorms1;
    numDocNorms += numDocNorms1;

    // test with index3
    verifyIndex(dir3);
    doTestNorms(random, dir3);

    // now with single segment
    iw = new IndexWriter(dir3, newIndexWriterConfig( TEST_VERSION_CURRENT,
        anlzr).setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(5).setMergePolicy(newLogMergePolicy(3)));
    iw.forceMerge(1);
    iw.close();
    verifyIndex(dir3);

    dir1.close();
    dir2.close();
    dir3.close();
}
// Repeatedly adds documents (with and without compound files), mutates f1's
// norms, and verifies the stored norms after every step.
private void doTestNorms(Random random, Directory dir) throws IOException {
    int num = atLeast(1);
    for (int i=0; i<num; i++) {
        addDocs(random, dir,12,true);
        verifyIndex(dir);
        modifyNormsForF1(dir);
        verifyIndex(dir);
        addDocs(random, dir,12,false);
        verifyIndex(dir);
        modifyNormsForF1(dir);
        verifyIndex(dir);
    }
}
// Creates a fresh (empty) index in dir with the boost-only similarity,
// merge factor 3 and compound files enabled.
private void createIndex(Random random, Directory dir) throws IOException {
    IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(
        TEST_VERSION_CURRENT, anlzr).setOpenMode(OpenMode.CREATE)
        .setMaxBufferedDocs(5).setSimilarity(similarityOne).setMergePolicy(newLogMergePolicy()));
    LogMergePolicy lmp = (LogMergePolicy) iw.getConfig().getMergePolicy();
    lmp.setMergeFactor(3);
    lmp.setUseCompoundFile(true);
    iw.close();
}
// For every third document, swaps f1's norm with the norm of another doc via
// IndexReader.setNorm (creating separate norms files) and mirrors the swap in
// the modifiedNorms bookkeeping list.
private void modifyNormsForF1(Directory dir) throws IOException {
    IndexReader ir = IndexReader.open(dir, false);
    int n = ir.maxDoc();
    for (int i = 0; i < n; i+=3) { // modify for every third doc
        int k = (i*3) % modifiedNorms.size();
        float origNorm = modifiedNorms.get(i).floatValue();
        float newNorm = modifiedNorms.get(k).floatValue();
        //System.out.println("Modifying: for "+i+" from "+origNorm+" to "+newNorm);
        //System.out.println("      and: for "+k+" from "+newNorm+" to "+origNorm);
        modifiedNorms.set(i, Float.valueOf(newNorm));
        modifiedNorms.set(k, Float.valueOf(origNorm));
        ir.setNorm(i, "f"+1, newNorm);
        ir.setNorm(k, "f"+1, origNorm);
    }
    ir.close();
}
// Checks every field's stored norms against the expected values: field f1 is
// compared against modifiedNorms (mutated by modifyNormsForF1), all other
// fields against the original norms list.
private void verifyIndex(Directory dir) throws IOException {
    IndexReader ir = IndexReader.open(dir, false);
    for (int i = 0; i < NUM_FIELDS; i++) {
        String field = "f"+i;
        byte b[] = ir.norms(field);
        assertEquals("number of norms mismatches",numDocNorms,b.length);
        ArrayList<Float> storedNorms = (i==1 ? modifiedNorms : norms);
        for (int j = 0; j < b.length; j++) {
            float norm = similarityOne.decodeNormValue(b[j]);
            float norm1 = storedNorms.get(j).floatValue();
            assertEquals("stored norm value of "+field+" for doc "+j+" is "+norm+" - a mismatch!", norm, norm1, 0.000001);
        }
    }
    ir.close();
}
/**
 * Appends {@code ndocs} documents to an existing index (OpenMode.APPEND),
 * using the same writer settings as createIndex but with the compound-file
 * flag supplied by the caller.
 */
private void addDocs(Random random, Directory dir, int ndocs, boolean compound) throws IOException {
  IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, anlzr)
      .setOpenMode(OpenMode.APPEND)
      .setMaxBufferedDocs(5)
      .setSimilarity(similarityOne)
      .setMergePolicy(newLogMergePolicy());
  IndexWriter writer = new IndexWriter(dir, conf);
  LogMergePolicy mergePolicy = (LogMergePolicy) writer.getConfig().getMergePolicy();
  mergePolicy.setMergeFactor(3);
  mergePolicy.setUseCompoundFile(compound);
  for (int added = 0; added < ndocs; added++) {
    writer.addDocument(newDoc());
  }
  writer.close();
}
/**
 * Creates the next document: fields f0..f9, each un-stored and
 * not-analyzed, all sharing a single boost taken from nextNorm().
 */
private Document newDoc() {
  float boost = nextNorm();
  Document doc = new Document();
  for (int fieldNum = 0; fieldNum < 10; fieldNum++) {
    Field field = newField("f" + fieldNum, "v" + fieldNum, Store.NO, Index.NOT_ANALYZED);
    field.setBoost(boost);
    doc.add(field);
  }
  return doc;
}
// return unique norm values that are unchanged by encoding/decoding
/**
 * Produces the next norm value: strictly greater than the previous one and
 * a fixpoint of {@code encodeNormValue}/{@code decodeNormValue} (so what we
 * record equals what the index will store). The value is appended to both
 * {@code norms} and {@code modifiedNorms} at index {@code numDocNorms}.
 */
private float nextNorm() {
  float norm = lastNorm + normDelta;
  do {
    float norm1 = similarityOne.decodeNormValue(similarityOne.encodeNormValue(norm));
    if (norm1 > lastNorm) {
      //System.out.println(norm1+" > "+lastNorm);
      norm = norm1;
      break;
    }
    // encoding collapsed this candidate onto an earlier value; try a larger one
    norm += normDelta;
  } while (true);
  norms.add(numDocNorms, Float.valueOf(norm));
  modifiedNorms.add(numDocNorms, Float.valueOf(norm));
  //System.out.println("creating norm("+numDocNorms+"): "+norm);
  numDocNorms ++;
  lastNorm = (norm>10 ? 0 : norm); //there's a limit to how many distinct values can be stored in a single byte
  return norm;
}
/**
 * Similarity whose norm is the raw field length with an identity
 * encode/decode, so stored norm bytes can be asserted exactly in tests.
 */
class CustomNormEncodingSimilarity extends DefaultSimilarity {

  @Override
  public byte encodeNormValue(float f) {
    return (byte) f; // truncating cast; test values are small non-negative counts
  }

  @Override
  public float decodeNormValue(byte b) {
    return b; // implicit byte->float widening, identical to an explicit cast
  }

  @Override
  public float computeNorm(String field, FieldInvertState state) {
    return state.getLength(); // norm = token count; implicit int->float widening
  }
}
// LUCENE-1260
/**
 * Verifies that a custom norm encoder/decoder is actually used at index
 * time: field "foo" stays empty (length 0), field "bar" always holds one
 * token, so with CustomNormEncodingSimilarity the stored norm bytes must
 * be exactly 0 and 1 for every document.
 */
public void testCustomEncoder() throws Exception {
  Directory dir = newDirectory();
  IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
  config.setSimilarity(new CustomNormEncodingSimilarity());
  RandomIndexWriter writer = new RandomIndexWriter(random, dir, config);

  Document doc = new Document();
  Field foo = newField("foo", "", Field.Store.NO, Field.Index.ANALYZED);
  Field bar = newField("bar", "", Field.Store.NO, Field.Index.ANALYZED);
  doc.add(foo);
  doc.add(bar);
  for (int added = 0; added < 100; added++) {
    bar.setValue("singleton");
    writer.addDocument(doc);
  }

  IndexReader reader = writer.getReader();
  writer.close();
  byte[] fooNorms = reader.norms("foo");
  byte[] barNorms = reader.norms("bar");
  for (int docId = 0; docId < reader.maxDoc(); docId++) {
    assertEquals(0, fooNorms[docId]);
    assertEquals(1, barNorms[docId]);
  }
  reader.close();
  dir.close();
}
}
| apache-2.0 |
gastaldi/hibernate-validator | hibernate-validator/src/main/java/org/hibernate/validator/engine/MessageInterpolatorContext.java | 2685 | /*
* JBoss, Home of Professional Open Source
* Copyright 2009, Red Hat, Inc. and/or its affiliates, and individual contributors
* by the @authors tag. See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.hibernate.validator.engine;
import javax.validation.metadata.ConstraintDescriptor;
import javax.validation.MessageInterpolator;
/**
 * Immutable implementation of the context passed to a
 * {@link MessageInterpolator} during message interpolation: the constraint
 * being validated plus the value that was validated. Both may be null.
 *
 * @author Emmanuel Bernard
 * @author Hardy Ferentschik
 */
public class MessageInterpolatorContext implements MessageInterpolator.Context {

	private final ConstraintDescriptor<?> constraintDescriptor;
	private final Object validatedValue;

	public MessageInterpolatorContext(ConstraintDescriptor<?> constraintDescriptor, Object validatedValue) {
		this.constraintDescriptor = constraintDescriptor;
		this.validatedValue = validatedValue;
	}

	public ConstraintDescriptor<?> getConstraintDescriptor() {
		return constraintDescriptor;
	}

	public Object getValidatedValue() {
		return validatedValue;
	}

	@Override
	public boolean equals(Object o) {
		if ( this == o ) {
			return true;
		}
		if ( o == null || getClass() != o.getClass() ) {
			return false;
		}
		MessageInterpolatorContext other = ( MessageInterpolatorContext ) o;
		// null-safe field comparisons, short-circuiting on the descriptor
		if ( constraintDescriptor == null ? other.constraintDescriptor != null
				: !constraintDescriptor.equals( other.constraintDescriptor ) ) {
			return false;
		}
		return validatedValue == null ? other.validatedValue == null
				: validatedValue.equals( other.validatedValue );
	}

	@Override
	public int hashCode() {
		int result = constraintDescriptor == null ? 0 : constraintDescriptor.hashCode();
		return 31 * result + ( validatedValue == null ? 0 : validatedValue.hashCode() );
	}

	@Override
	public String toString() {
		return "MessageInterpolatorContext"
				+ "{constraintDescriptor=" + constraintDescriptor
				+ ", validatedValue=" + validatedValue
				+ '}';
	}
}
| apache-2.0 |
aglne/dubbo | dubbo-common/src/test/java/org/apache/dubbo/common/utils/CollectionUtilsTest.java | 7407 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.utils;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import static org.apache.dubbo.common.utils.CollectionUtils.isEmpty;
import static org.apache.dubbo.common.utils.CollectionUtils.isNotEmpty;
import static org.apache.dubbo.common.utils.CollectionUtils.toMap;
import static org.apache.dubbo.common.utils.CollectionUtils.toStringMap;
import static java.util.Collections.emptyList;
import static java.util.Collections.singleton;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.jupiter.api.Assertions.assertTrue;
/**
 * Unit tests for {@code CollectionUtils}: sorting helpers, splitting and
 * joining of key/value string maps, map equality and emptiness checks.
 * Each test pins the exact expected results, including null handling.
 */
public class CollectionUtilsTest {
    @Test
    public void testSort() throws Exception {
        List<Integer> list = new ArrayList<Integer>();
        list.add(100);
        list.add(10);
        list.add(20);
        List<Integer> expected = new ArrayList<Integer>();
        expected.add(10);
        expected.add(20);
        expected.add(100);
        assertEquals(expected, CollectionUtils.sort(list));
    }

    // sort() passes null through and leaves an empty list empty
    @Test
    public void testSortNull() throws Exception {
        assertNull(CollectionUtils.sort(null));
        assertTrue(CollectionUtils.sort(new ArrayList<Integer>()).isEmpty());
    }

    // simple-name sort is expected to place the null entries first
    @Test
    public void testSortSimpleName() throws Exception {
        List<String> list = new ArrayList<String>();
        list.add("aaa.z");
        list.add("b");
        list.add(null);
        list.add("zzz.a");
        list.add("c");
        list.add(null);
        List<String> sorted = CollectionUtils.sortSimpleName(list);
        assertNull(sorted.get(0));
        assertNull(sorted.get(1));
    }

    @Test
    public void testSortSimpleNameNull() throws Exception {
        assertNull(CollectionUtils.sortSimpleName(null));
        assertTrue(CollectionUtils.sortSimpleName(new ArrayList<String>()).isEmpty());
    }

    // splitAll turns each "k:v" entry into a nested map; null and empty
    // value lists are preserved as-is, and a null input map yields null
    @Test
    public void testSplitAll() throws Exception {
        assertNull(CollectionUtils.splitAll(null, null));
        assertNull(CollectionUtils.splitAll(null, "-"));
        assertTrue(CollectionUtils.splitAll(new HashMap<String, List<String>>(), "-").isEmpty());
        Map<String, List<String>> input = new HashMap<String, List<String>>();
        input.put("key1", Arrays.asList("1:a", "2:b", "3:c"));
        input.put("key2", Arrays.asList("1:a", "2:b"));
        input.put("key3", null);
        input.put("key4", new ArrayList<String>());
        Map<String, Map<String, String>> expected = new HashMap<String, Map<String, String>>();
        expected.put("key1", CollectionUtils.toStringMap("1", "a", "2", "b", "3", "c"));
        expected.put("key2", CollectionUtils.toStringMap("1", "a", "2", "b"));
        expected.put("key3", null);
        expected.put("key4", new HashMap<String, String>());
        assertEquals(expected, CollectionUtils.splitAll(input, ":"));
    }

    // joinAll is the inverse of splitAll; the joined lists are sorted before
    // comparison because HashMap iteration order is not deterministic
    @Test
    public void testJoinAll() throws Exception {
        assertNull(CollectionUtils.joinAll(null, null));
        assertNull(CollectionUtils.joinAll(null, "-"));
        Map<String, List<String>> expected = new HashMap<String, List<String>>();
        expected.put("key1", Arrays.asList("1:a", "2:b", "3:c"));
        expected.put("key2", Arrays.asList("1:a", "2:b"));
        expected.put("key3", null);
        expected.put("key4", new ArrayList<String>());
        Map<String, Map<String, String>> input = new HashMap<String, Map<String, String>>();
        input.put("key1", CollectionUtils.toStringMap("1", "a", "2", "b", "3", "c"));
        input.put("key2", CollectionUtils.toStringMap("1", "a", "2", "b"));
        input.put("key3", null);
        input.put("key4", new HashMap<String, String>());
        Map<String, List<String>> output = CollectionUtils.joinAll(input, ":");
        for (Map.Entry<String, List<String>> entry : output.entrySet()) {
            if (entry.getValue() == null)
                continue;
            Collections.sort(entry.getValue());
        }
        assertEquals(expected, output);
    }

    @Test
    public void testJoinList() throws Exception {
        List<String> list = Arrays.asList();
        assertEquals("", CollectionUtils.join(list, "/"));
        list = Arrays.asList("x");
        assertEquals("x", CollectionUtils.join(list, "-"));
        list = Arrays.asList("a", "b");
        assertEquals("a/b", CollectionUtils.join(list, "/"));
    }

    // mapEquals: two nulls are equal, null vs empty map is not
    @Test
    public void testMapEquals() throws Exception {
        assertTrue(CollectionUtils.mapEquals(null, null));
        assertFalse(CollectionUtils.mapEquals(null, new HashMap<String, String>()));
        assertFalse(CollectionUtils.mapEquals(new HashMap<String, String>(), null));
        assertTrue(CollectionUtils.mapEquals(CollectionUtils.toStringMap("1", "a", "2", "b"), CollectionUtils.toStringMap("1", "a", "2", "b")));
        assertFalse(CollectionUtils.mapEquals(CollectionUtils.toStringMap("1", "a"), CollectionUtils.toStringMap("1", "a", "2", "b")));
    }

    @Test
    public void testStringMap1() throws Exception {
        assertThat(toStringMap("key", "value"), equalTo(Collections.singletonMap("key", "value")));
    }

    // an odd number of arguments must be rejected
    @Test
    public void testStringMap2() throws Exception {
        Assertions.assertThrows(IllegalArgumentException.class, () -> toStringMap("key", "value", "odd"));
    }

    @Test
    public void testToMap1() throws Exception {
        assertTrue(CollectionUtils.toMap().isEmpty());
        Map<String, Integer> expected = new HashMap<String, Integer>();
        expected.put("a", 1);
        expected.put("b", 2);
        expected.put("c", 3);
        assertEquals(expected, CollectionUtils.toMap("a", 1, "b", 2, "c", 3));
    }

    // an odd number of key/value arguments must be rejected
    @Test
    public void testToMap2() throws Exception {
        Assertions.assertThrows(IllegalArgumentException.class, () -> toMap("a", "b", "c"));
    }

    @Test
    public void testIsEmpty() throws Exception {
        assertThat(isEmpty(null), is(true));
        assertThat(isEmpty(new HashSet()), is(true));
        assertThat(isEmpty(emptyList()), is(true));
    }

    @Test
    public void testIsNotEmpty() throws Exception {
        assertThat(isNotEmpty(singleton("a")), is(true));
    }
}
amansinha100/incubator-calcite | core/src/main/java/org/eigenbase/rel/WindowRel.java | 10447 | /*
// Licensed to Julian Hyde under one or more contributor license
// agreements. See the NOTICE file distributed with this work for
// additional information regarding copyright ownership.
//
// Julian Hyde licenses this file to you under the Apache License,
// Version 2.0 (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at:
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
*/
package org.eigenbase.rel;
import java.util.*;
import org.eigenbase.relopt.*;
import org.eigenbase.reltype.*;
import org.eigenbase.rex.*;
import org.eigenbase.sql.SqlNode;
import org.eigenbase.util.Pair;
import org.eigenbase.util.Util;
import net.hydromatic.linq4j.Ord;
import net.hydromatic.optiq.util.BitSets;
import com.google.common.collect.LinkedListMultimap;
import com.google.common.collect.Multimap;
/**
 * A relational expression representing a set of window aggregates.
 *
 * <p>A window rel can handle several window aggregate functions, over several
 * partitions, with pre- and post-expressions, and an optional post-filter.
 * Each of the partitions is defined by a partition key (zero or more columns)
 * and a range (logical or physical). The partitions expect the data to be
 * sorted correctly on input to the relational expression.
 *
 * <p>Each {@link org.eigenbase.rel.WindowRelBase.Window} has a set of
 * {@link org.eigenbase.rex.RexOver} objects.
 *
 * <p>Created by {@link org.eigenbase.rel.rules.WindowedAggSplitterRule}.
 */
public final class WindowRel extends WindowRelBase {
  /**
   * Creates a WindowRel.
   *
   * @param cluster Cluster
   * @param traits Trait set
   * @param child Input relational expression
   * @param rowType Output row type
   * @param windows Windows
   */
  public WindowRel(
      RelOptCluster cluster, RelTraitSet traits, RelNode child,
      RelDataType rowType, List<Window> windows) {
    super(cluster, traits, child, rowType, windows);
  }

  @Override
  public WindowRel copy(RelTraitSet traitSet, List<RelNode> inputs) {
    return new WindowRel(
        getCluster(), traitSet, sole(inputs), rowType, windows);
  }

  /**
   * Creates a WindowRel from a {@link RexProgram} containing
   * {@link RexOver} expressions, wrapped in a projection that restores the
   * program's output order.
   *
   * @param cluster Cluster
   * @param traitSet Trait set
   * @param child Input relational expression
   * @param program Program containing the windowed aggregate expressions
   * @param outRowType Desired output row type
   */
  public static RelNode create(
      RelOptCluster cluster,
      RelTraitSet traitSet,
      RelNode child,
      final RexProgram program,
      RelDataType outRowType) {
    // Build a list of distinct windows, partitions and aggregate
    // functions.
    final Multimap<WindowKey, RexOver> windowMap =
        LinkedListMultimap.create();
    // Build a list of windows, partitions, and aggregate functions. Each
    // aggregate function will add its arguments as outputs of the input
    // program.
    for (RexNode agg : program.getExprList()) {
      if (agg instanceof RexOver) {
        addWindows(windowMap, (RexOver) agg);
      }
    }
    // Maps each original RexOver to the RexWinAggCall that replaces it; used
    // later by the shuttle to rewrite references.
    final Map<RexOver, WindowRelBase.RexWinAggCall> aggMap =
        new HashMap<RexOver, WindowRelBase.RexWinAggCall>();
    List<Window> windowList = new ArrayList<Window>();
    for (Map.Entry<WindowKey, Collection<RexOver>> entry
        : windowMap.asMap().entrySet()) {
      final WindowKey windowKey = entry.getKey();
      final List<RexWinAggCall> aggCalls =
          new ArrayList<RexWinAggCall>();
      for (RexOver over : entry.getValue()) {
        final RexWinAggCall aggCall =
            new RexWinAggCall(
                over.getAggOperator(),
                over.getType(),
                toInputRefs(over.operands),
                aggMap.size());
        aggCalls.add(aggCall);
        aggMap.put(over, aggCall);
      }
      windowList.add(
          new Window(
              windowKey.groupSet,
              windowKey.isRows,
              windowKey.lowerBound,
              windowKey.upperBound,
              windowKey.orderKeys,
              aggCalls));
    }
    // Figure out the type of the inputs to the output program.
    // They are: the inputs to this rel, followed by the outputs of
    // each window.
    final List<WindowRelBase.RexWinAggCall> flattenedAggCallList =
        new ArrayList<WindowRelBase.RexWinAggCall>();
    List<Map.Entry<String, RelDataType>> fieldList =
        new ArrayList<Map.Entry<String, RelDataType>>(
            child.getRowType().getFieldList());
    final int offset = fieldList.size();
    // Use better field names for agg calls that are projected.
    Map<Integer, String> fieldNames = new HashMap<Integer, String>();
    for (Ord<RexLocalRef> ref : Ord.zip(program.getProjectList())) {
      final int index = ref.e.getIndex();
      if (index >= offset) {
        fieldNames.put(
            index - offset, outRowType.getFieldNames().get(ref.i));
      }
    }
    for (Ord<Window> window : Ord.zip(windowList)) {
      for (Ord<RexWinAggCall> over : Ord.zip(window.e.aggCalls)) {
        // Add the k-th over expression of
        // the i-th window to the output of the program.
        String name = fieldNames.get(over.i);
        if (name == null || name.startsWith("$")) {
          // fall back to a synthetic "w<window>$o<over>" name
          name = "w" + window.i + "$o" + over.i;
        }
        fieldList.add(Pair.of(name, over.e.getType()));
        flattenedAggCallList.add(over.e);
      }
    }
    final RelDataType intermediateRowType =
        cluster.getTypeFactory().createStructType(fieldList);
    final int inputFieldCount = child.getRowType().getFieldCount();
    // The output program is the windowed agg's program, combined with
    // the output calc (if it exists).
    RexShuttle shuttle =
        new RexShuttle() {
          public RexNode visitOver(RexOver over) {
            // Look up the aggCall which this expr was translated to.
            final WindowRelBase.RexWinAggCall aggCall =
                aggMap.get(over);
            assert aggCall != null;
            assert RelOptUtil.eq(
                "over",
                over.getType(),
                "aggCall",
                aggCall.getType(),
                true);
            // Find the index of the aggCall among all partitions of all
            // windows.
            final int aggCallIndex =
                flattenedAggCallList.indexOf(aggCall);
            assert aggCallIndex >= 0;
            // Replace expression with a reference to the window slot.
            final int index = inputFieldCount + aggCallIndex;
            assert RelOptUtil.eq(
                "over",
                over.getType(),
                "intermed",
                intermediateRowType.getFieldList().get(index).getType(),
                true);
            return new RexInputRef(
                index,
                over.getType());
          }

          public RexNode visitLocalRef(RexLocalRef localRef) {
            final int index = localRef.getIndex();
            if (index < inputFieldCount) {
              // Reference to input field.
              return localRef;
            }
            // Shift references past the input fields to account for the
            // aggregate-call slots inserted between inputs and expressions.
            return new RexLocalRef(
                flattenedAggCallList.size() + index,
                localRef.getType());
          }
        };
    // TODO: The order that the "over" calls occur in the windows and
    // partitions may not match the order in which they occurred in the
    // original expression. We should add a project to permute them.
    WindowRel window =
        new WindowRel(
            cluster, traitSet, child, intermediateRowType, windowList);
    return CalcRel.createProject(
        window,
        new AbstractList<RexNode>() {
          public RexNode get(int index) {
            final RexLocalRef ref = program.getProjectList().get(index);
            return new RexInputRef(ref.getIndex(), ref.getType());
          }

          public int size() {
            return program.getProjectList().size();
          }
        },
        outRowType.getFieldNames());
  }

  /**
   * Wraps a list of operands (which must all be {@link RexLocalRef}s) as a
   * lazy list of {@link RexInputRef}s with the same indexes and types.
   */
  private static List<RexNode> toInputRefs(final List<RexNode> operands) {
    return new AbstractList<RexNode>() {
      public int size() {
        return operands.size();
      }

      public RexNode get(int index) {
        final RexNode operand = operands.get(index);
        assert operand instanceof RexLocalRef;
        final RexLocalRef ref = (RexLocalRef) operand;
        return new RexInputRef(ref.getIndex(), ref.getType());
      }
    };
  }

  /** Window specification. All windowed aggregates over the same window
   * (regardless of how it is specified, in terms of a named window or specified
   * attribute by attribute) will end up with the same window key. */
  private static class WindowKey {
    private final BitSet groupSet;
    private final RelCollation orderKeys;
    private final boolean isRows;
    private final SqlNode lowerBound;
    private final SqlNode upperBound;

    public WindowKey(
        BitSet groupSet,
        RelCollation orderKeys,
        boolean isRows,
        SqlNode lowerBound,
        SqlNode upperBound) {
      this.groupSet = groupSet;
      this.orderKeys = orderKeys;
      this.isRows = isRows;
      this.lowerBound = lowerBound;
      this.upperBound = upperBound;
    }

    @Override
    public int hashCode() {
      return Util.hashV(
          groupSet, orderKeys, isRows, lowerBound, upperBound);
    }

    @Override
    public boolean equals(Object obj) {
      // bounds compared null-safely via Util.equal
      return obj == this
          || obj instanceof WindowKey
          && groupSet.equals(((WindowKey) obj).groupSet)
          && orderKeys.equals(((WindowKey) obj).orderKeys)
          && Util.equal(lowerBound, ((WindowKey) obj).lowerBound)
          && Util.equal(upperBound, ((WindowKey) obj).upperBound)
          && isRows == ((WindowKey) obj).isRows;
    }
  }

  /**
   * Registers {@code over} in {@code windowMap} under the {@link WindowKey}
   * derived from its window specification (partition, order, bounds).
   */
  private static void addWindows(
      Multimap<WindowKey, RexOver> windowMap,
      RexOver over) {
    final RexWindow aggWindow = over.getWindow();
    // Look up or create a window.
    RelCollation orderKeys = getCollation(aggWindow.orderKeys);
    BitSet groupSet =
        BitSets.of(getProjectOrdinals(aggWindow.partitionKeys));
    WindowKey windowKey =
        new WindowKey(
            groupSet, orderKeys, aggWindow.isRows(),
            aggWindow.getLowerBound(), aggWindow.getUpperBound());
    windowMap.put(windowKey, over);
  }
}
// End WindowRel.java
| apache-2.0 |
sabriarabacioglu/engerek | gui/admin-gui/src/main/java/com/evolveum/midpoint/web/page/admin/server/dto/OperationResultStatusIcon.java | 3042 | /*
* Copyright (c) 2010-2013 Evolveum
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.evolveum.midpoint.web.page.admin.server.dto;
import com.evolveum.midpoint.schema.result.OperationResultStatus;
import com.evolveum.midpoint.xml.ns._public.common.common_3.OperationResultStatusType;
/**
 * Maps an operation result status to the CSS classes of the icon shown in
 * the GUI. Provides lookups for both the schema enum
 * ({@code OperationResultStatusType}) and the runtime enum
 * ({@code OperationResultStatus}); null or unrecognized statuses map to
 * {@link #UNKNOWN}.
 *
 * @author shood
 * */
public enum OperationResultStatusIcon {

    UNKNOWN("fa fa-fw fa-question-circle fa-lg text-warning"),
    SUCCESS("fa fa-fw fa-check-circle fa-lg text-success"),
    WARNING("fa fa-fw fa-exclamation-circle fa-lg text-warning"),
    PARTIAL_ERROR("fa fa-fw fa-minus-circle fa-lg text-danger"),
    FATAL_ERROR("fa fa-fw fa-times-circle fa-lg text-danger"),
    HANDLED_ERROR("fa fa-fw fa-minus-circle fa-lg text-warning"),
    NOT_APPLICABLE("fa fa-fw fa-check-circle fa-lg text-muted"),
    IN_PROGRESS("fa fa-fw fa-clock-o fa-lg text-info");

    // CSS classes for the icon; immutable once constructed
    private final String icon;

    private OperationResultStatusIcon(String icon){
        this.icon = icon;
    }

    /** Returns the CSS classes rendering this status icon. */
    public String getIcon(){
        return icon;
    }

    /**
     * Returns the icon for the given schema status type; never null
     * ({@link #UNKNOWN} for null or unmapped values).
     */
    public static OperationResultStatusIcon parseOperationalResultStatus(OperationResultStatusType statusType){
        if (statusType == null) {
            return UNKNOWN;
        }
        return fromStatusName(statusType.name());
    }

    /**
     * Returns the icon for the given runtime status; never null
     * ({@link #UNKNOWN} for null or unmapped values).
     */
    public static OperationResultStatusIcon parseOperationalResultStatus(OperationResultStatus statusType){
        if (statusType == null) {
            return UNKNOWN;
        }
        return fromStatusName(statusType.name());
    }

    // Both status enums use exactly the same constant names as the icons
    // mapped by the former duplicated switch statements (FATAL_ERROR,
    // PARTIAL_ERROR, HANDLED_ERROR, SUCCESS, WARNING, NOT_APPLICABLE,
    // IN_PROGRESS), so a name-based lookup reproduces that mapping; any
    // other name falls back to UNKNOWN like the former 'default' branch.
    private static OperationResultStatusIcon fromStatusName(String name) {
        for (OperationResultStatusIcon candidate : values()) {
            if (candidate.name().equals(name)) {
                return candidate;
            }
        }
        return UNKNOWN;
    }
}
| apache-2.0 |
sdl/ecommerce-framework | dxa.java/ecommerce-framework-dxa-module/src/main/java/com/sdl/ecommerce/dxa/model/SearchFeedbackWidget.java | 1153 | package com.sdl.ecommerce.dxa.model;
import com.sdl.ecommerce.api.model.QuerySuggestion;
import com.sdl.webapp.common.api.mapping.semantic.annotations.SemanticEntity;
import com.sdl.webapp.common.api.mapping.semantic.annotations.SemanticProperty;
import com.sdl.webapp.common.api.model.RichText;
import com.sdl.webapp.common.api.model.entity.AbstractEntityModel;
import java.util.List;
import static com.sdl.webapp.common.api.mapping.semantic.config.SemanticVocabulary.SDL_CORE;
/**
 * Search Feedback Widget entity: carries spell-check label text and query
 * suggestions for the search feedback view.
 *
 * @author nic
 */
@SemanticEntity(entityName = "SearchFeedback", vocabulary = SDL_CORE, prefix = "e", public_ = false)
public class SearchFeedbackWidget extends AbstractEntityModel {

    // Mapped from CMS content via the semantic annotation below.
    @SemanticProperty("e:spellCheckLabel")
    private RichText spellCheckLabel;

    private List<QuerySuggestion> querySuggestions;

    public List<QuerySuggestion> getQuerySuggestions() {
        return querySuggestions;
    }

    public void setQuerySuggestions(List<QuerySuggestion> querySuggestions) {
        this.querySuggestions = querySuggestions;
    }

    public RichText getSpellCheckLabel() {
        return spellCheckLabel;
    }

    // Added for consistency with querySuggestions, which already exposes a
    // setter; allows the label to be populated programmatically as well.
    public void setSpellCheckLabel(RichText spellCheckLabel) {
        this.spellCheckLabel = spellCheckLabel;
    }
}
| apache-2.0 |
sluk3r/lucene-3.0.2-src-study | src/java/org/apache/lucene/analysis/tokenattributes/FlagsAttributeImpl.java | 2293 | package org.apache.lucene.analysis.tokenattributes;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.Serializable;
import org.apache.lucene.util.AttributeImpl;
/**
 * Default implementation of {@link FlagsAttribute}: a plain int bit set,
 * cleared to 0, that tokenizer-chain components can use to pass flags from
 * one TokenFilter to another.
 */
public class FlagsAttributeImpl extends AttributeImpl implements FlagsAttribute, Cloneable, Serializable {

  // raw flag bits; 0 means "no flags set"
  private int flags = 0;

  /**
   * EXPERIMENTAL: While we think this is here to stay, we may want to change it to be a long.
   * <p/>
   *
   * Get the bitset for any bits that have been set. This is completely distinct from {@link TypeAttribute#type()}, although they do share similar purposes.
   * The flags can be used to encode information about the token for use by other {@link org.apache.lucene.analysis.TokenFilter}s.
   *
   * @return The bits
   */
  public int getFlags() {
    return flags;
  }

  /**
   * @see #getFlags()
   */
  public void setFlags(int flags) {
    this.flags = flags;
  }

  @Override
  public void clear() {
    flags = 0;
  }

  @Override
  public boolean equals(Object other) {
    if (other == this) {
      return true;
    }
    return (other instanceof FlagsAttributeImpl)
        && ((FlagsAttributeImpl) other).flags == flags;
  }

  @Override
  public int hashCode() {
    return flags; // the flag bits are the whole state
  }

  @Override
  public void copyTo(AttributeImpl target) {
    ((FlagsAttribute) target).setFlags(flags);
  }
}
| apache-2.0 |
xiaoyuQi/JavaProject | dubbo-admin/src/main/java/com/alibaba/dubbo/governance/web/sysinfo/module/screen/Dumps.java | 2100 | /*
* Copyright 1999-2101 Alibaba Group.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.dubbo.governance.web.sysinfo.module.screen;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import javax.servlet.http.HttpServletResponse;
import org.springframework.beans.factory.annotation.Autowired;
import com.alibaba.dubbo.governance.service.ConsumerService;
import com.alibaba.dubbo.governance.service.ProviderService;
import com.alibaba.dubbo.governance.web.common.module.screen.Restful;
/**
 * Screen backing the "dumps" system-info page: exposes all known services,
 * providers and consumers, plus the services that have consumers but no
 * provider.
 *
 * @author tony.chenl
 */
public class Dumps extends Restful {

    @Autowired
    ProviderService providerDAO;

    @Autowired
    ConsumerService consumerDAO;

    @Autowired
    HttpServletResponse response;

    /** Populates the template context for the dump page. */
    public void index(Map<String, Object> context) {
        context.put("noProviderServices", getNoProviders());
        context.put("services", providerDAO.findServices());
        context.put("providers", providerDAO.findAll());
        context.put("consumers", consumerDAO.findAll());
    }

    /**
     * Returns the services referenced by consumers for which no provider is
     * registered. Never returns null; a null list from either DAO is treated
     * as empty.
     */
    private List<String> getNoProviders() {
        List<String> providerServices = providerDAO.findServices();
        List<String> consumerServices = consumerDAO.findServices();
        List<String> noProviderServices = new ArrayList<String>();
        if (consumerServices != null) {
            noProviderServices.addAll(consumerServices);
            // guard: List.removeAll(null) throws NPE if no providers are known
            if (providerServices != null) {
                noProviderServices.removeAll(providerServices);
            }
        }
        return noProviderServices;
    }
}
| apache-2.0 |
sdole/aws-sdk-java | aws-java-sdk-elasticsearch/src/main/java/com/amazonaws/services/elasticsearch/model/UpdateElasticsearchDomainConfigRequest.java | 17624 | /*
* Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.elasticsearch.model;
import java.io.Serializable;
import com.amazonaws.AmazonWebServiceRequest;
/**
* <p>
* Container for the parameters to the
* <code><a>UpdateElasticsearchDomain</a></code> operation. Specifies the type
* and number of instances in the domain cluster.
* </p>
*/
public class UpdateElasticsearchDomainConfigRequest extends
AmazonWebServiceRequest implements Serializable, Cloneable {
/**
* <p>
* The name of the Elasticsearch domain that you are updating.
* </p>
*/
private String domainName;
/**
* <p>
* The type and number of instances to instantiate for the domain cluster.
* </p>
*/
private ElasticsearchClusterConfig elasticsearchClusterConfig;
/**
* <p>
* Specify the type and size of the EBS volume that you want to use.
* </p>
*/
private EBSOptions eBSOptions;
/**
* <p>
* Option to set the time, in UTC format, for the daily automated snapshot.
* Default value is <code>0</code> hours.
* </p>
*/
private SnapshotOptions snapshotOptions;
/**
* <p>
* Modifies the advanced option to allow references to indices in an HTTP
* request body. Must be <code>false</code> when configuring access to
* individual sub-resources. By default, the value is <code>true</code>. See
* <a href=
* "http://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-createupdatedomains.html#es-createdomain-configure-advanced-options"
* target="_blank">Configuration Advanced Options</a> for more information.
* </p>
*/
private java.util.Map<String, String> advancedOptions;
/**
* <p>
* IAM access policy as a JSON-formatted string.
* </p>
*/
private String accessPolicies;
/**
 * <p>
 * The name of the Elasticsearch domain that you are updating.
 * </p>
 * 
 * @param domainName
 *        The name of the Elasticsearch domain that you are updating.
 * @see #withDomainName(String)
 */
public void setDomainName(String domainName) {
    // plain field assignment; no validation is performed here
    this.domainName = domainName;
}
/**
 * <p>
 * The name of the Elasticsearch domain that you are updating.
 * </p>
 * 
 * @return The name of the Elasticsearch domain that you are updating.
 * @see #setDomainName(String)
 */
public String getDomainName() {
    return this.domainName;
}
/**
 * <p>
 * The name of the Elasticsearch domain that you are updating.
 * </p>
 *
 * @param domainName
 *        The name of the Elasticsearch domain that you are updating.
 * @return This request object, so that method calls can be chained
 *         together.
 */
public UpdateElasticsearchDomainConfigRequest withDomainName(
        String domainName) {
    this.domainName = domainName;
    return this;
}
/**
* <p>
* The type and number of instances to instantiate for the domain cluster.
* </p>
*
* @param elasticsearchClusterConfig
* The type and number of instances to instantiate for the domain
* cluster.
*/
public void setElasticsearchClusterConfig(
ElasticsearchClusterConfig elasticsearchClusterConfig) {
this.elasticsearchClusterConfig = elasticsearchClusterConfig;
}
/**
* <p>
* The type and number of instances to instantiate for the domain cluster.
* </p>
*
* @return The type and number of instances to instantiate for the domain
* cluster.
*/
public ElasticsearchClusterConfig getElasticsearchClusterConfig() {
return this.elasticsearchClusterConfig;
}
/**
* <p>
* The type and number of instances to instantiate for the domain cluster.
* </p>
*
* @param elasticsearchClusterConfig
* The type and number of instances to instantiate for the domain
* cluster.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public UpdateElasticsearchDomainConfigRequest withElasticsearchClusterConfig(
ElasticsearchClusterConfig elasticsearchClusterConfig) {
setElasticsearchClusterConfig(elasticsearchClusterConfig);
return this;
}
/**
* <p>
* Specify the type and size of the EBS volume that you want to use.
* </p>
*
* @param eBSOptions
* Specify the type and size of the EBS volume that you want to use.
*/
public void setEBSOptions(EBSOptions eBSOptions) {
this.eBSOptions = eBSOptions;
}
/**
* <p>
* Specify the type and size of the EBS volume that you want to use.
* </p>
*
* @return Specify the type and size of the EBS volume that you want to use.
*/
public EBSOptions getEBSOptions() {
return this.eBSOptions;
}
/**
* <p>
* Specify the type and size of the EBS volume that you want to use.
* </p>
*
* @param eBSOptions
* Specify the type and size of the EBS volume that you want to use.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public UpdateElasticsearchDomainConfigRequest withEBSOptions(
EBSOptions eBSOptions) {
setEBSOptions(eBSOptions);
return this;
}
/**
* <p>
* Option to set the time, in UTC format, for the daily automated snapshot.
* Default value is <code>0</code> hours.
* </p>
*
* @param snapshotOptions
* Option to set the time, in UTC format, for the daily automated
* snapshot. Default value is <code>0</code> hours.
*/
public void setSnapshotOptions(SnapshotOptions snapshotOptions) {
this.snapshotOptions = snapshotOptions;
}
/**
* <p>
* Option to set the time, in UTC format, for the daily automated snapshot.
* Default value is <code>0</code> hours.
* </p>
*
* @return Option to set the time, in UTC format, for the daily automated
* snapshot. Default value is <code>0</code> hours.
*/
public SnapshotOptions getSnapshotOptions() {
return this.snapshotOptions;
}
/**
* <p>
* Option to set the time, in UTC format, for the daily automated snapshot.
* Default value is <code>0</code> hours.
* </p>
*
* @param snapshotOptions
* Option to set the time, in UTC format, for the daily automated
* snapshot. Default value is <code>0</code> hours.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public UpdateElasticsearchDomainConfigRequest withSnapshotOptions(
SnapshotOptions snapshotOptions) {
setSnapshotOptions(snapshotOptions);
return this;
}
/**
* <p>
* Modifies the advanced option to allow references to indices in an HTTP
* request body. Must be <code>false</code> when configuring access to
* individual sub-resources. By default, the value is <code>true</code>. See
* <a href=
* "http://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-createupdatedomains.html#es-createdomain-configure-advanced-options"
* target="_blank">Configuration Advanced Options</a> for more information.
* </p>
*
* @return Modifies the advanced option to allow references to indices in an
* HTTP request body. Must be <code>false</code> when configuring
* access to individual sub-resources. By default, the value is
* <code>true</code>. See <a href=
* "http://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-createupdatedomains.html#es-createdomain-configure-advanced-options"
* target="_blank">Configuration Advanced Options</a> for more
* information.
*/
public java.util.Map<String, String> getAdvancedOptions() {
return advancedOptions;
}
/**
* <p>
* Modifies the advanced option to allow references to indices in an HTTP
* request body. Must be <code>false</code> when configuring access to
* individual sub-resources. By default, the value is <code>true</code>. See
* <a href=
* "http://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-createupdatedomains.html#es-createdomain-configure-advanced-options"
* target="_blank">Configuration Advanced Options</a> for more information.
* </p>
*
* @param advancedOptions
* Modifies the advanced option to allow references to indices in an
* HTTP request body. Must be <code>false</code> when configuring
* access to individual sub-resources. By default, the value is
* <code>true</code>. See <a href=
* "http://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-createupdatedomains.html#es-createdomain-configure-advanced-options"
* target="_blank">Configuration Advanced Options</a> for more
* information.
*/
public void setAdvancedOptions(java.util.Map<String, String> advancedOptions) {
this.advancedOptions = advancedOptions;
}
/**
* <p>
* Modifies the advanced option to allow references to indices in an HTTP
* request body. Must be <code>false</code> when configuring access to
* individual sub-resources. By default, the value is <code>true</code>. See
* <a href=
* "http://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-createupdatedomains.html#es-createdomain-configure-advanced-options"
* target="_blank">Configuration Advanced Options</a> for more information.
* </p>
*
* @param advancedOptions
* Modifies the advanced option to allow references to indices in an
* HTTP request body. Must be <code>false</code> when configuring
* access to individual sub-resources. By default, the value is
* <code>true</code>. See <a href=
* "http://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-createupdatedomains.html#es-createdomain-configure-advanced-options"
* target="_blank">Configuration Advanced Options</a> for more
* information.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public UpdateElasticsearchDomainConfigRequest withAdvancedOptions(
java.util.Map<String, String> advancedOptions) {
setAdvancedOptions(advancedOptions);
return this;
}
public UpdateElasticsearchDomainConfigRequest addAdvancedOptionsEntry(
String key, String value) {
if (null == this.advancedOptions) {
this.advancedOptions = new java.util.HashMap<String, String>();
}
if (this.advancedOptions.containsKey(key))
throw new IllegalArgumentException("Duplicated keys ("
+ key.toString() + ") are provided.");
this.advancedOptions.put(key, value);
return this;
}
/**
* Removes all the entries added into AdvancedOptions. <p> Returns a
* reference to this object so that method calls can be chained together.
*/
public UpdateElasticsearchDomainConfigRequest clearAdvancedOptionsEntries() {
this.advancedOptions = null;
return this;
}
/**
* <p>
* IAM access policy as a JSON-formatted string.
* </p>
*
* @param accessPolicies
* IAM access policy as a JSON-formatted string.
*/
public void setAccessPolicies(String accessPolicies) {
this.accessPolicies = accessPolicies;
}
/**
* <p>
* IAM access policy as a JSON-formatted string.
* </p>
*
* @return IAM access policy as a JSON-formatted string.
*/
public String getAccessPolicies() {
return this.accessPolicies;
}
/**
* <p>
* IAM access policy as a JSON-formatted string.
* </p>
*
* @param accessPolicies
* IAM access policy as a JSON-formatted string.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public UpdateElasticsearchDomainConfigRequest withAccessPolicies(
String accessPolicies) {
setAccessPolicies(accessPolicies);
return this;
}
/**
* Returns a string representation of this object; useful for testing and
* debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getDomainName() != null)
sb.append("DomainName: " + getDomainName() + ",");
if (getElasticsearchClusterConfig() != null)
sb.append("ElasticsearchClusterConfig: "
+ getElasticsearchClusterConfig() + ",");
if (getEBSOptions() != null)
sb.append("EBSOptions: " + getEBSOptions() + ",");
if (getSnapshotOptions() != null)
sb.append("SnapshotOptions: " + getSnapshotOptions() + ",");
if (getAdvancedOptions() != null)
sb.append("AdvancedOptions: " + getAdvancedOptions() + ",");
if (getAccessPolicies() != null)
sb.append("AccessPolicies: " + getAccessPolicies());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof UpdateElasticsearchDomainConfigRequest == false)
return false;
UpdateElasticsearchDomainConfigRequest other = (UpdateElasticsearchDomainConfigRequest) obj;
if (other.getDomainName() == null ^ this.getDomainName() == null)
return false;
if (other.getDomainName() != null
&& other.getDomainName().equals(this.getDomainName()) == false)
return false;
if (other.getElasticsearchClusterConfig() == null
^ this.getElasticsearchClusterConfig() == null)
return false;
if (other.getElasticsearchClusterConfig() != null
&& other.getElasticsearchClusterConfig().equals(
this.getElasticsearchClusterConfig()) == false)
return false;
if (other.getEBSOptions() == null ^ this.getEBSOptions() == null)
return false;
if (other.getEBSOptions() != null
&& other.getEBSOptions().equals(this.getEBSOptions()) == false)
return false;
if (other.getSnapshotOptions() == null
^ this.getSnapshotOptions() == null)
return false;
if (other.getSnapshotOptions() != null
&& other.getSnapshotOptions().equals(this.getSnapshotOptions()) == false)
return false;
if (other.getAdvancedOptions() == null
^ this.getAdvancedOptions() == null)
return false;
if (other.getAdvancedOptions() != null
&& other.getAdvancedOptions().equals(this.getAdvancedOptions()) == false)
return false;
if (other.getAccessPolicies() == null
^ this.getAccessPolicies() == null)
return false;
if (other.getAccessPolicies() != null
&& other.getAccessPolicies().equals(this.getAccessPolicies()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode
+ ((getDomainName() == null) ? 0 : getDomainName().hashCode());
hashCode = prime
* hashCode
+ ((getElasticsearchClusterConfig() == null) ? 0
: getElasticsearchClusterConfig().hashCode());
hashCode = prime * hashCode
+ ((getEBSOptions() == null) ? 0 : getEBSOptions().hashCode());
hashCode = prime
* hashCode
+ ((getSnapshotOptions() == null) ? 0 : getSnapshotOptions()
.hashCode());
hashCode = prime
* hashCode
+ ((getAdvancedOptions() == null) ? 0 : getAdvancedOptions()
.hashCode());
hashCode = prime
* hashCode
+ ((getAccessPolicies() == null) ? 0 : getAccessPolicies()
.hashCode());
return hashCode;
}
@Override
public UpdateElasticsearchDomainConfigRequest clone() {
return (UpdateElasticsearchDomainConfigRequest) super.clone();
}
} | apache-2.0 |
marcjansen/shogun2 | src/shogun2-core/src/main/java/de/terrestris/shogun2/converter/UserGroupIdResolver.java | 661 | package de.terrestris.shogun2.converter;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import de.terrestris.shogun2.dao.UserGroupDao;
import de.terrestris.shogun2.model.UserGroup;
import de.terrestris.shogun2.service.UserGroupService;
/**
* @author Nils Buehner
*/
public class UserGroupIdResolver<E extends UserGroup, D extends UserGroupDao<E>, S extends UserGroupService<E, D>> extends
        PersistentObjectIdResolver<E, D, S> {

    /**
     * Injects the {@link UserGroupService} used by the superclass to resolve
     * {@link UserGroup} entities by their ID. The concrete bean is selected by
     * the "userGroupService" qualifier.
     *
     * @param service the user group service to delegate resolution to
     */
    @Override
    @Autowired
    @Qualifier("userGroupService")
    public void setService(S service) {
        this.service = service;
    }
}
| apache-2.0 |
apache/incubator-asterixdb | asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AbstractNumericArithmeticEval.java | 35013 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.runtime.evaluators.functions;
import java.io.DataOutput;
import org.apache.asterix.dataflow.data.nontagged.serde.ADateSerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.ADateTimeSerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.ADayTimeDurationSerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.ADoubleSerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.AFloatSerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.AInt16SerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.AInt64SerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.AInt8SerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.ATimeSerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.AYearMonthDurationSerializerDeserializer;
import org.apache.asterix.formats.nontagged.SerializerDeserializerProvider;
import org.apache.asterix.om.base.AMutableDate;
import org.apache.asterix.om.base.AMutableDateTime;
import org.apache.asterix.om.base.AMutableDouble;
import org.apache.asterix.om.base.AMutableDuration;
import org.apache.asterix.om.base.AMutableFloat;
import org.apache.asterix.om.base.AMutableInt16;
import org.apache.asterix.om.base.AMutableInt32;
import org.apache.asterix.om.base.AMutableInt64;
import org.apache.asterix.om.base.AMutableInt8;
import org.apache.asterix.om.base.AMutableTime;
import org.apache.asterix.om.base.ANull;
import org.apache.asterix.om.base.temporal.GregorianCalendarSystem;
import org.apache.asterix.om.exceptions.ExceptionUtil;
import org.apache.asterix.om.types.ATypeTag;
import org.apache.asterix.om.types.BuiltinType;
import org.apache.asterix.om.types.EnumDeserializer;
import org.apache.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
import org.apache.asterix.runtime.exceptions.OverflowException;
import org.apache.asterix.runtime.exceptions.UnderflowException;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.algebricks.runtime.base.IEvaluatorContext;
import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluator;
import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.data.std.api.IPointable;
import org.apache.hyracks.data.std.primitive.VoidPointable;
import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public abstract class AbstractNumericArithmeticEval extends AbstractScalarFunctionDynamicDescriptor {
private static final long serialVersionUID = 527445160961348706L;
/**
* abstract method for arithmetic operation between two integer values
*
* @param lhs first operand
* @param rhs second operand
* @param result result holder
* @return {@code false} if the result is {@code NULL}, otherwise {@code true}
*/
protected abstract boolean evaluateInteger(long lhs, long rhs, AMutableInt64 result) throws HyracksDataException;
/**
* abstract method for arithmetic operation between two floating point values
*
* @param lhs first operand
* @param rhs second operand
* @param result result holder
* @return {@code false} if the result is {@code NULL}, otherwise {@code true}
*/
protected abstract boolean evaluateDouble(double lhs, double rhs, AMutableDouble result)
throws HyracksDataException;
/**
* abstract method for arithmetic operation between a time instance (date/time/datetime)
* and a duration (duration/year-month-duration/day-time-duration)
*
* @param chronon first operand
* @param yearMonth year-month component of the second operand
* @param dayTime day-time component of the second operand
* @param result result holder
* @param ctx evaluator context
* @return {@code false} if the result is {@code NULL}, otherwise {@code true}
*/
protected abstract boolean evaluateTimeDurationArithmetic(long chronon, int yearMonth, long dayTime,
boolean isTimeOnly, AMutableInt64 result, IEvaluatorContext ctx) throws HyracksDataException;
/**
* abstract method for arithmetic operation between two time instances (date/time/datetime)
*
* @param chronon0 first operand
* @param chronon1 second operand
* @param result result holder
* @param ctx evaluator context
* @return {@code false} if the result is {@code NULL}, otherwise {@code true}
*/
protected abstract boolean evaluateTimeInstanceArithmetic(long chronon0, long chronon1, AMutableInt64 result,
IEvaluatorContext ctx) throws HyracksDataException;
@Override
public IScalarEvaluatorFactory createEvaluatorFactory(final IScalarEvaluatorFactory[] args) {
return new IScalarEvaluatorFactory() {
private static final long serialVersionUID = 1L;
@Override
public IScalarEvaluator createScalarEvaluator(IEvaluatorContext ctx) throws HyracksDataException {
return new IScalarEvaluator() {
private final ArrayBackedValueStorage resultStorage = new ArrayBackedValueStorage();
private final DataOutput out = resultStorage.getDataOutput();
private final IPointable argPtr0 = new VoidPointable();
private final IPointable argPtr1 = new VoidPointable();
private final IScalarEvaluator evalLeft = args[0].createScalarEvaluator(ctx);
private final IScalarEvaluator evalRight = args[1].createScalarEvaluator(ctx);
private final double[] operandsFloating = new double[args.length];
private final long[] operandsInteger = new long[args.length];
private final AMutableFloat aFloat = new AMutableFloat(0);
private final AMutableDouble aDouble = new AMutableDouble(0);
private final AMutableInt64 aInt64 = new AMutableInt64(0);
private final AMutableInt32 aInt32 = new AMutableInt32(0);
private final AMutableInt16 aInt16 = new AMutableInt16((short) 0);
private final AMutableInt8 aInt8 = new AMutableInt8((byte) 0);
private final AMutableDuration aDuration = new AMutableDuration(0, 0);
private final AMutableDate aDate = new AMutableDate(0);
private final AMutableTime aTime = new AMutableTime(0);
private final AMutableDateTime aDatetime = new AMutableDateTime(0);
private final FunctionIdentifier funID = getIdentifier();
@SuppressWarnings("rawtypes")
private final ISerializerDeserializer int8Serde =
SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT8);
@SuppressWarnings("rawtypes")
private final ISerializerDeserializer int16Serde =
SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT16);
@SuppressWarnings("rawtypes")
private final ISerializerDeserializer int32Serde =
SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT32);
@SuppressWarnings("rawtypes")
private final ISerializerDeserializer int64Serde =
SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT64);
@SuppressWarnings("rawtypes")
private final ISerializerDeserializer floatSerde =
SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AFLOAT);
@SuppressWarnings("rawtypes")
private final ISerializerDeserializer doubleSerde =
SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADOUBLE);
@SuppressWarnings("rawtypes")
private final ISerializerDeserializer dateSerde =
SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADATE);
@SuppressWarnings("rawtypes")
private final ISerializerDeserializer timeSerde =
SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ATIME);
@SuppressWarnings("rawtypes")
private final ISerializerDeserializer dateTimeSerde =
SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADATETIME);
@SuppressWarnings("rawtypes")
private final ISerializerDeserializer durationSerde =
SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADURATION);
@SuppressWarnings("rawtypes")
private final ISerializerDeserializer nullSerde =
SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ANULL);
private final byte[] EXP_TYPES = new byte[] { ATypeTag.SERIALIZED_INT8_TYPE_TAG,
ATypeTag.SERIALIZED_INT16_TYPE_TAG, ATypeTag.SERIALIZED_INT32_TYPE_TAG,
ATypeTag.SERIALIZED_INT64_TYPE_TAG, ATypeTag.SERIALIZED_FLOAT_TYPE_TAG,
ATypeTag.SERIALIZED_DOUBLE_TYPE_TAG, ATypeTag.SERIALIZED_DATE_TYPE_TAG,
ATypeTag.SERIALIZED_TIME_TYPE_TAG, ATypeTag.SERIALIZED_DATETIME_TYPE_TAG,
ATypeTag.SERIALIZED_DURATION_TYPE_TAG, ATypeTag.SERIALIZED_YEAR_MONTH_DURATION_TYPE_TAG,
ATypeTag.SERIALIZED_DAY_TIME_DURATION_TYPE_TAG };
@Override
@SuppressWarnings("unchecked")
public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
evalLeft.evaluate(tuple, argPtr0);
evalRight.evaluate(tuple, argPtr1);
resultStorage.reset();
if (PointableHelper.checkAndSetMissingOrNull(result, argPtr0, argPtr1)) {
return;
}
ATypeTag argTypeMax = null;
for (int i = 0; i < 2; i++) {
IPointable argPtr = i == 0 ? argPtr0 : argPtr1;
byte[] bytes = argPtr.getByteArray();
int offset = argPtr.getStartOffset();
ATypeTag currentType;
ATypeTag typeTag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(bytes[offset]);
switch (typeTag) {
case TINYINT:
currentType = ATypeTag.TINYINT;
operandsInteger[i] = AInt8SerializerDeserializer.getByte(bytes, offset + 1);
operandsFloating[i] = operandsInteger[i];
break;
case SMALLINT:
currentType = ATypeTag.SMALLINT;
operandsInteger[i] = AInt16SerializerDeserializer.getShort(bytes, offset + 1);
operandsFloating[i] = operandsInteger[i];
break;
case INTEGER:
currentType = ATypeTag.INTEGER;
operandsInteger[i] = AInt32SerializerDeserializer.getInt(bytes, offset + 1);
operandsFloating[i] = operandsInteger[i];
break;
case BIGINT:
currentType = ATypeTag.BIGINT;
operandsInteger[i] = AInt64SerializerDeserializer.getLong(bytes, offset + 1);
operandsFloating[i] = operandsInteger[i];
break;
case FLOAT:
currentType = ATypeTag.FLOAT;
operandsFloating[i] = AFloatSerializerDeserializer.getFloat(bytes, offset + 1);
break;
case DOUBLE:
currentType = ATypeTag.DOUBLE;
operandsFloating[i] = ADoubleSerializerDeserializer.getDouble(bytes, offset + 1);
break;
case DATE:
case TIME:
case DATETIME:
case DURATION:
case YEARMONTHDURATION:
case DAYTIMEDURATION:
evaluateTemporalArithmeticOperation();
result.set(resultStorage);
return;
default:
ExceptionUtil.warnTypeMismatch(ctx, sourceLoc, funID, bytes[offset], i, EXP_TYPES);
PointableHelper.setNull(result);
return;
}
if (i == 0 || currentType.ordinal() > argTypeMax.ordinal()) {
argTypeMax = currentType;
}
}
ATypeTag resultType = getNumericResultType(argTypeMax);
long lres;
double dres;
switch (resultType) {
case TINYINT:
if (evaluateInteger(operandsInteger[0], operandsInteger[1], aInt64)) {
lres = aInt64.getLongValue();
if (lres > Byte.MAX_VALUE) {
throw new OverflowException(sourceLoc, getIdentifier());
}
if (lres < Byte.MIN_VALUE) {
throw new UnderflowException(sourceLoc, getIdentifier());
}
aInt8.setValue((byte) lres);
int8Serde.serialize(aInt8, out);
} else {
nullSerde.serialize(ANull.NULL, out);
}
break;
case SMALLINT:
if (evaluateInteger(operandsInteger[0], operandsInteger[1], aInt64)) {
lres = aInt64.getLongValue();
if (lres > Short.MAX_VALUE) {
throw new OverflowException(sourceLoc, getIdentifier());
}
if (lres < Short.MIN_VALUE) {
throw new UnderflowException(sourceLoc, getIdentifier());
}
aInt16.setValue((short) lres);
int16Serde.serialize(aInt16, out);
} else {
nullSerde.serialize(ANull.NULL, out);
}
break;
case INTEGER:
if (evaluateInteger(operandsInteger[0], operandsInteger[1], aInt64)) {
lres = aInt64.getLongValue();
if (lres > Integer.MAX_VALUE) {
throw new OverflowException(sourceLoc, getIdentifier());
}
if (lres < Integer.MIN_VALUE) {
throw new UnderflowException(sourceLoc, getIdentifier());
}
aInt32.setValue((int) lres);
int32Serde.serialize(aInt32, out);
} else {
nullSerde.serialize(ANull.NULL, out);
}
break;
case BIGINT:
if (evaluateInteger(operandsInteger[0], operandsInteger[1], aInt64)) {
int64Serde.serialize(aInt64, out);
} else {
nullSerde.serialize(ANull.NULL, out);
}
break;
case FLOAT:
if (evaluateDouble(operandsFloating[0], operandsFloating[1], aDouble)) {
dres = aDouble.getDoubleValue();
if (Double.isFinite(dres)) {
if (dres > Float.MAX_VALUE) {
throw new OverflowException(sourceLoc, getIdentifier());
}
if (dres < -Float.MAX_VALUE) {
throw new UnderflowException(sourceLoc, getIdentifier());
}
}
aFloat.setValue((float) dres);
floatSerde.serialize(aFloat, out);
} else {
nullSerde.serialize(ANull.NULL, out);
}
break;
case DOUBLE:
if (evaluateDouble(operandsFloating[0], operandsFloating[1], aDouble)) {
doubleSerde.serialize(aDouble, out);
} else {
nullSerde.serialize(ANull.NULL, out);
}
break;
}
result.set(resultStorage);
}
@SuppressWarnings("unchecked")
private void evaluateTemporalArithmeticOperation() throws HyracksDataException {
byte[] bytes1 = argPtr1.getByteArray();
int offset1 = argPtr1.getStartOffset();
ATypeTag rightType = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(bytes1[offset1]);
byte[] bytes0 = argPtr0.getByteArray();
int offset0 = argPtr0.getStartOffset();
ATypeTag leftType = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(bytes0[offset0]);
if (rightType == leftType) {
long leftChronon = 0, rightChronon = 0, dayTime = 0;
int yearMonth = 0;
boolean yearMonthIsNull = false, dayTimeIsNull = false;
switch (leftType) {
case DATE:
leftChronon = ADateSerializerDeserializer.getChronon(bytes0, offset0 + 1)
* GregorianCalendarSystem.CHRONON_OF_DAY;
rightChronon = ADateSerializerDeserializer.getChronon(bytes1, offset1 + 1)
* GregorianCalendarSystem.CHRONON_OF_DAY;
break;
case TIME:
leftChronon = ATimeSerializerDeserializer.getChronon(bytes0, offset0 + 1);
rightChronon = ATimeSerializerDeserializer.getChronon(bytes1, offset1 + 1);
break;
case DATETIME:
leftChronon = ADateTimeSerializerDeserializer.getChronon(bytes0, offset0 + 1);
rightChronon = ADateTimeSerializerDeserializer.getChronon(bytes1, offset1 + 1);
break;
case YEARMONTHDURATION:
if (evaluateTimeInstanceArithmetic(
AYearMonthDurationSerializerDeserializer.getYearMonth(bytes0, offset0 + 1),
AYearMonthDurationSerializerDeserializer.getYearMonth(bytes1, offset1 + 1),
aInt64, ctx)) {
yearMonth = (int) aInt64.getLongValue();
} else {
yearMonthIsNull = true;
}
break;
case DAYTIMEDURATION:
leftChronon =
ADayTimeDurationSerializerDeserializer.getDayTime(bytes0, offset0 + 1);
rightChronon =
ADayTimeDurationSerializerDeserializer.getDayTime(bytes1, offset1 + 1);
break;
default:
ExceptionUtil.warnUnsupportedType(ctx, sourceLoc, funID.getName(), rightType);
nullSerde.serialize(ANull.NULL, out);
return;
}
if (evaluateTimeInstanceArithmetic(leftChronon, rightChronon, aInt64, ctx)) {
dayTime = aInt64.getLongValue();
} else {
dayTimeIsNull = true;
}
if (yearMonthIsNull || dayTimeIsNull) {
nullSerde.serialize(ANull.NULL, out);
} else {
aDuration.setValue(yearMonth, dayTime);
durationSerde.serialize(aDuration, out);
}
} else {
long chronon = 0, dayTime = 0;
int yearMonth = 0;
ATypeTag resultType = null;
ISerializerDeserializer serde = null;
boolean isTimeOnly = false;
switch (leftType) {
case TIME:
serde = timeSerde;
chronon = ATimeSerializerDeserializer.getChronon(bytes0, offset0 + 1);
isTimeOnly = true;
resultType = ATypeTag.TIME;
switch (rightType) {
case DAYTIMEDURATION:
dayTime = ADayTimeDurationSerializerDeserializer.getDayTime(bytes1,
offset1 + 1);
break;
case DURATION:
dayTime = ADurationSerializerDeserializer.getDayTime(bytes1, offset1 + 1);
yearMonth =
ADurationSerializerDeserializer.getYearMonth(bytes1, offset1 + 1);
break;
default:
ExceptionUtil.warnIncompatibleType(ctx, sourceLoc, funID.getName(),
leftType, rightType);
nullSerde.serialize(ANull.NULL, out);
return;
}
break;
case DATE:
serde = dateSerde;
resultType = ATypeTag.DATE;
chronon = ADateSerializerDeserializer.getChronon(bytes0, offset0 + 1)
* GregorianCalendarSystem.CHRONON_OF_DAY;
case DATETIME:
if (leftType == ATypeTag.DATETIME) {
serde = dateTimeSerde;
resultType = ATypeTag.DATETIME;
chronon = ADateTimeSerializerDeserializer.getChronon(bytes0, offset0 + 1);
}
switch (rightType) {
case DURATION:
yearMonth =
ADurationSerializerDeserializer.getYearMonth(bytes1, offset1 + 1);
dayTime = ADurationSerializerDeserializer.getDayTime(bytes1, offset1 + 1);
break;
case YEARMONTHDURATION:
yearMonth = AYearMonthDurationSerializerDeserializer.getYearMonth(bytes1,
offset1 + 1);
break;
case DAYTIMEDURATION:
dayTime = ADayTimeDurationSerializerDeserializer.getDayTime(bytes1,
offset1 + 1);
break;
default:
ExceptionUtil.warnIncompatibleType(ctx, sourceLoc, funID.getName(),
leftType, rightType);
nullSerde.serialize(ANull.NULL, out);
return;
}
break;
case YEARMONTHDURATION:
yearMonth =
AYearMonthDurationSerializerDeserializer.getYearMonth(bytes0, offset0 + 1);
switch (rightType) {
case DATETIME:
serde = dateTimeSerde;
resultType = ATypeTag.DATETIME;
chronon = ADateTimeSerializerDeserializer.getChronon(bytes1, offset1 + 1);
break;
case DATE:
serde = dateSerde;
resultType = ATypeTag.DATE;
chronon = ADateSerializerDeserializer.getChronon(bytes1, offset1 + 1)
* GregorianCalendarSystem.CHRONON_OF_DAY;
break;
default:
ExceptionUtil.warnIncompatibleType(ctx, sourceLoc, funID.getName(),
leftType, rightType);
nullSerde.serialize(ANull.NULL, out);
return;
}
break;
case DURATION:
yearMonth = ADurationSerializerDeserializer.getYearMonth(bytes0, offset0 + 1);
dayTime = ADurationSerializerDeserializer.getDayTime(bytes0, offset0 + 1);
case DAYTIMEDURATION:
if (leftType == ATypeTag.DAYTIMEDURATION) {
dayTime =
ADayTimeDurationSerializerDeserializer.getDayTime(bytes0, offset0 + 1);
}
switch (rightType) {
case DATETIME:
serde = dateTimeSerde;
resultType = ATypeTag.DATETIME;
chronon = ADateTimeSerializerDeserializer.getChronon(bytes1, offset1 + 1);
break;
case DATE:
serde = dateSerde;
resultType = ATypeTag.DATE;
chronon = ADateSerializerDeserializer.getChronon(bytes1, offset1 + 1)
* GregorianCalendarSystem.CHRONON_OF_DAY;
break;
case TIME:
if (yearMonth == 0) {
serde = timeSerde;
resultType = ATypeTag.TIME;
chronon = ATimeSerializerDeserializer.getChronon(bytes1, offset1 + 1);
isTimeOnly = true;
break;
}
default:
ExceptionUtil.warnIncompatibleType(ctx, sourceLoc, funID.getName(),
leftType, rightType);
nullSerde.serialize(ANull.NULL, out);
return;
}
break;
default:
ExceptionUtil.warnIncompatibleType(ctx, sourceLoc, funID.getName(), leftType,
rightType);
nullSerde.serialize(ANull.NULL, out);
return;
}
if (evaluateTimeDurationArithmetic(chronon, yearMonth, dayTime, isTimeOnly, aInt64, ctx)) {
chronon = aInt64.getLongValue();
switch (resultType) {
case DATE:
if (chronon < 0 && chronon % GregorianCalendarSystem.CHRONON_OF_DAY != 0) {
chronon = chronon / GregorianCalendarSystem.CHRONON_OF_DAY - 1;
} else {
chronon = chronon / GregorianCalendarSystem.CHRONON_OF_DAY;
}
aDate.setValue((int) chronon);
serde.serialize(aDate, out);
break;
case TIME:
aTime.setValue((int) chronon);
serde.serialize(aTime, out);
break;
case DATETIME:
aDatetime.setValue(chronon);
serde.serialize(aDatetime, out);
break;
default:
ExceptionUtil.warnIncompatibleType(ctx, sourceLoc, funID.getName(), leftType,
rightType);
nullSerde.serialize(ANull.NULL, out);
return;
}
} else {
nullSerde.serialize(ANull.NULL, out);
}
}
}
};
}
};
}
    /**
     * Returns the type tag to use for a numeric arithmetic result, given the
     * wider of the two operand type tags. This base implementation keeps the
     * operand type unchanged; subclasses may override it to widen or adjust
     * the result type.
     */
    protected ATypeTag getNumericResultType(ATypeTag argTypeMax) {
        return argTypeMax;
    }
}
| apache-2.0 |
hexdecteam/stagemonitor | stagemonitor-requestmonitor/src/main/java/org/stagemonitor/requestmonitor/reporter/CallTreeExcludingPostExecutionInterceptor.java | 1061 | package org.stagemonitor.requestmonitor.reporter;
import org.stagemonitor.core.metrics.MetricUtils;
import org.stagemonitor.requestmonitor.RequestMonitorPlugin;
/**
 * Post-execution interceptor that decides whether the call tree should be
 * reported. It always records a {@code containsCallTree} property, and strips
 * the call-tree properties from the report when the request was faster than
 * the configured percentile of all requests.
 */
class CallTreeExcludingPostExecutionInterceptor extends PostExecutionRequestTraceReporterInterceptor {

	@Override
	public void interceptReport(PostExecutionInterceptorContext context) {
		// Record whether a call tree is present at all; without one there is
		// nothing further to decide.
		final boolean hasCallTree = context.getRequestTrace().getCallStack() != null;
		context.addProperty("containsCallTree", hasCallTree);
		if (!hasCallTree) {
			return;
		}
		final RequestMonitorPlugin plugin = context.getConfig(RequestMonitorPlugin.class);
		final double percentileLimit =
				plugin.getExcludeCallTreeFromElasticsearchReportWhenFasterThanXPercentOfRequests();
		final boolean fastEnoughToExclude = !MetricUtils.isFasterThanXPercentOfAllRequests(
				context.getRequestTrace().getExecutionTime(), percentileLimit, context.getTimerForThisRequest());
		if (fastEnoughToExclude) {
			exclude(context);
		}
	}

	/** Removes the call-tree properties from the report and flags their absence. */
	private void exclude(PostExecutionInterceptorContext context) {
		context.addExcludedProperties("callStack", "callStackJson")
				.addProperty("containsCallTree", false);
	}
}
| apache-2.0 |
gdeignacio/sgtsic | sgtsic/src/main/java/es/caib/sgtsic/docx/OpenXMLDocument.java | 15614 | package es.caib.sgtsic.docx;
import com.google.common.base.Strings;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import java.util.zip.ZipOutputStream;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
//import org.apache.xerces.dom.NodeImpl;
import org.w3c.dom.DOMException;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
/**
* Représente un document OpenXML en général. Cette classe encapsule toutes les
* méthodes qui permettent de manipuler un package de manière transparente.
*
* @author Julien Chable
* @version 0.2
*
*/
public class OpenXMLDocument {
protected Package container;
protected CorePropertiesHelper corePropertiesHelper;
    /**
     * Constructor.
     *
     * @param docPackage
     *            Reference to the document's package.
     */
    public OpenXMLDocument(Package docPackage) {
        container = docPackage;
        corePropertiesHelper = new CorePropertiesHelper(container);
        // Register this instance's CorePropertiesHelper as the marshaller for
        // the core-properties part type, so property changes are written back
        // when the package is saved.
        container.addMarshaller(ContentTypeConstant.CORE_PROPERTIES,
                corePropertiesHelper);
    }
/**
* Extrait toutes les ressources du type spécifié et les place dans le
* répertoire cible.
*
* @param contentType
* Le type de contenu.
* @param destFolder
* Le répertoire cible.
*/
public void extractFiles(String contentType, File destFolder) {
if (!destFolder.isDirectory())
throw new IllegalArgumentException(
"Le paramètre desFolder doit être un répertoire !");
ArrayList<PackagePart> parts = new ArrayList<PackagePart>();
for (PackagePart part : container.getPartByContentType(contentType))
parts.add(part);
extractParts(parts, destFolder);
}
/**
* Extrait le contenu des parties spécifiées dans le répertoire cible.
*
* @param parts
* Les parties à extraire.
* @param destFolder
* Le répertoire de destination.
*/
public void extractParts(ArrayList<PackagePart> parts, File destFolder) {
for (PackagePart part : parts) {
String filename = PackageURIHelper.getFilename(part.getUri());
try {
InputStream ins = part.getInputStream();
FileOutputStream fw = new FileOutputStream(destFolder
.getAbsolutePath()
+ File.separator + filename);
byte[] buff = new byte[512];
while (ins.available() > 0) {
ins.read(buff);
fw.write(buff);
}
fw.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
    /**
     * Returns all package parts that are thumbnail images of the document,
     * i.e. the targets of the thumbnail relationship type.
     */
    public ArrayList<PackagePart> getThumbnails() {
        return container
                .getPartByRelationshipType(PackageRelationshipConstants.NS_THUMBNAIL_PART);
    }
    /**
     * Opens an OpenXML document.
     *
     * @param zipFile
     *            The Zip file backing the OpenXML document.
     * @param access
     *            The access mode for the document.
     * @return a new {@link OpenXMLDocument} wrapping the opened package.
     */
    public static OpenXMLDocument open(ZipFile zipFile, PackageAccess access) {
        return new OpenXMLDocument(Package.open(zipFile, access));
    }
    /**
     * Saves the whole document into the destination file.
     *
     * @param destFile
     *            The file the package is written to.
     */
    public void save(File destFile) {
        container.save(destFile);
    }
    /**
     * Closes the package to free the zip file created in the temporary
     * folder.
     * Added by: Willy Ekasalim - Allette Systems
     */
    public void close(){
        container.close();
    }
    /**
     * Returns the document's core properties (creator, title, subject,
     * keywords, dates, ...) as parsed by the {@code CorePropertiesHelper}.
     */
    public CoreProperties getCoreProperties() {
        return corePropertiesHelper.getCoreProperties();
    }
final class CorePropertiesHelper implements PartMarshaller {
private final static String NAMESPACE_DC_URI = "http://purl.org/dc/elements/1.1/";
private final static String NAMESPACE_CP_URI = "http://schemas.openxmlformats.org/package/2006/metadata/core-properties";
private final static String NAMESPACE_DCTERMS_URI = "http://purl.org/dc/terms/";
/**
* Référence vers le package.
*/
private Package container;
/**
* L'entrée Zip du fichier de propriétés du doccuments.
*/
private ZipEntry corePropertiesZipEntry;
/**
* Le bean des propriétés du document.
*/
private CoreProperties coreProperties;
/**
* L'arbre DOM des propriétés du document (sert à l'enregistrement)
*/
private Document xmlDoc;
public CorePropertiesHelper(Package container) {
this.container = container;
coreProperties = parseCorePropertiesFile();
}
/**
* Parse le fichier de propriétés du document.
*
* @return
*/
private CoreProperties parseCorePropertiesFile() {
CoreProperties coreProps = new CoreProperties();
corePropertiesZipEntry = getCorePropertiesZipEntry();
InputStream inStream = null;
try {
inStream = container.getArchive().getInputStream(
corePropertiesZipEntry);
} catch (IOException e) {
throw new InvalidFormatException(
"Impossible de lire le fichier de properiétés "
+ corePropertiesZipEntry.getName());
} catch (Exception e){
//Willy: core properties not always present in the file?
return null;
}
// Création du parser DOM
DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory
.newInstance();
documentBuilderFactory.setNamespaceAware(true);
documentBuilderFactory.setIgnoringElementContentWhitespace(true);
DocumentBuilder documentBuilder;
try {
documentBuilder = documentBuilderFactory.newDocumentBuilder();
// On parse le document XML en arbre DOM
xmlDoc = documentBuilder.parse(inStream);
// Créateur
NodeList creators = xmlDoc.getElementsByTagNameNS(NAMESPACE_DC_URI, "creator");
if (creators != null && creators.item(0) != null)
coreProps.setCreator(getTextContent(creators.item(0)));
// Titre
NodeList titles = xmlDoc.getElementsByTagNameNS(
NAMESPACE_DC_URI, "title");
if (titles != null && titles.item(0) != null)
coreProps.setTitle(getTextContent(titles.item(0)));
// Sujet
NodeList subjects = xmlDoc.getElementsByTagNameNS(
NAMESPACE_DC_URI, "subject");
if (subjects != null && subjects.item(0) != null)
coreProps.setSubject(getTextContent(subjects.item(0)));
// Mots clé
NodeList keywords = xmlDoc.getElementsByTagNameNS(
NAMESPACE_CP_URI, "keywords");
if (keywords != null & keywords.item(0) != null)
coreProps.setKeywords(getTextContent(keywords.item(0)));
// Description
NodeList descriptions = xmlDoc.getElementsByTagNameNS(
NAMESPACE_DC_URI, "description");
if (descriptions != null && descriptions.item(0) != null)
coreProps.setDescription(
getTextContent(descriptions.item(0)));
// Dernier personne à avoir modifié le document
NodeList lastModicationPersons = xmlDoc.getElementsByTagNameNS(
NAMESPACE_CP_URI, "lastModifiedBy");
if (lastModicationPersons != null
&& lastModicationPersons.item(0) != null)
coreProps.setLastModifiedBy(getTextContent(lastModicationPersons.item(0)));
// Revision
NodeList revisions = xmlDoc.getElementsByTagNameNS(
NAMESPACE_CP_URI, "revision");
if (revisions != null && revisions.item(0) != null)
coreProps.setRevision(getTextContent(revisions.item(0)));
// Date de création
NodeList created = xmlDoc.getElementsByTagNameNS(
NAMESPACE_DCTERMS_URI, "created");
if (created != null && created.item(0) != null)
coreProps.setCreated(getTextContent(created.item(0)));
// Date de modification
NodeList modifies = xmlDoc.getElementsByTagNameNS(
NAMESPACE_DCTERMS_URI, "modified");
if (modifies != null && modifies.item(0) != null)
coreProps.setModified(getTextContent(modifies.item(0)));
} catch (Exception e) {
e.printStackTrace();
return null;
}
return coreProps;
}
        /**
         * Writes the document's core-properties part into the given stream.
         * The current values of {@code coreProperties} are first copied back
         * into the cached DOM tree ({@code xmlDoc}), which is then serialized
         * as a new Zip entry under the original entry name.
         *
         * NOTE(review): this assumes every property element already exists in
         * the parsed DOM — {@code item(0)} is passed to setTextContent without
         * a null check; verify against documents missing optional properties.
         *
         * @param part
         *            The part being marshalled (the data actually comes from
         *            the cached DOM tree).
         * @param os
         *            Output stream; must be a {@link ZipOutputStream}.
         */
        public void marshall(PackagePart part, OutputStream os) {
            if (!(os instanceof ZipOutputStream))
                throw new IllegalArgumentException(
                        "Le flux doit être un ZipOutputSTream !");
            ZipOutputStream out = (ZipOutputStream) os;
            // Creator
            setTextContent(coreProperties.getCreator(), xmlDoc.getElementsByTagNameNS(NAMESPACE_DC_URI, "creator").item(0));
            // Title
            setTextContent(coreProperties.getTitle(), xmlDoc.getElementsByTagNameNS(NAMESPACE_DC_URI, "title").item(0));
            // Subject
            setTextContent(coreProperties.getSubject(), xmlDoc.getElementsByTagNameNS(NAMESPACE_DC_URI, "subject").item(0));
            // Keywords
            setTextContent(coreProperties.getKeywords(), xmlDoc.getElementsByTagNameNS(NAMESPACE_CP_URI, "keywords").item(0));
            // Description
            setTextContent(coreProperties.getDescription(), xmlDoc.getElementsByTagNameNS(NAMESPACE_DC_URI, "description").item(0));
            // Last person who modified the document
            setTextContent(coreProperties.getLastModifiedBy(), xmlDoc.getElementsByTagNameNS(NAMESPACE_CP_URI, "lastModifiedBy").item(0));
            // Revision
            setTextContent(coreProperties.getRevision(), xmlDoc.getElementsByTagNameNS(NAMESPACE_CP_URI, "revision").item(0));
            // Creation date
            setTextContent(coreProperties.getCreated(), xmlDoc.getElementsByTagNameNS(NAMESPACE_DCTERMS_URI, "created").item(0));
            // Modification date
            setTextContent(coreProperties.getModified(), xmlDoc.getElementsByTagNameNS(NAMESPACE_DCTERMS_URI, "modified").item(0));
            xmlDoc.normalize();
            // Write the part into the zip under its original entry name.
            ZipEntry ctEntry = new ZipEntry(corePropertiesZipEntry.getName());
            try {
                // Create the entry in the ZIP file.
                out.putNextEntry(ctEntry);
                DOMSource source = new DOMSource(xmlDoc);
                StreamResult result = new StreamResult(out);
                TransformerFactory transFactory = TransformerFactory
                        .newInstance();
                try {
                    Transformer transformer = transFactory.newTransformer();
                    transformer.setOutputProperty("indent", "yes");
                    transformer.transform(source, result);
                } catch (TransformerException e) {
                    System.err
                            .println("Echec de l'enregistrement : impossible de créer le fichier "
                                    + corePropertiesZipEntry.getName());
                }
                // Close the ZIP file entry.
                out.closeEntry();
            } catch (IOException e1) {
                System.err.println("");
            }
        }
/* Accesseurs */
public Package getContainer() {
return container;
}
public CoreProperties getCoreProperties() {
return coreProperties;
}
        /**
         * Returns the Zip entry of the document's core-properties part,
         * located through the core-properties package relationship, or
         * {@code null} when the document declares no such relationship.
         *
         * NOTE(review): this assumes getRelationship(0) returns null (rather
         * than throwing) when the relationship list is empty — verify against
         * the Package implementation.
         */
        private ZipEntry getCorePropertiesZipEntry() {
            PackageRelationship corePropsRel = container
                    .getRelationshipsByType(
                            PackageRelationshipConstants.NS_CORE_PROPERTIES)
                    .getRelationship(0);
            if (corePropsRel == null)
                return null;
            return new ZipEntry(corePropsRel.getTargetUri().getPath());
        }
}
// Para solucionar un problema de compatibilidad con JBOSS para el parseo de
// XML hemos tenido que incluir la implementacion de los métodos que hay a
// continuacion, ya que la libreria que hay en JBOSS está sobre escrita y
// no los implementa.
// Se ha tenido que modificar metodos de esta clase para que usen los añadi-
// dos.
//
// Para compilar, se han añadido las librerias de JBOSS en el proyecto SICIE-registro-web
// - $JBOSS_HOME/lib/endorsed/xercesImpl.jar
// - $JBOSS_HOME/lib/endorsed//xml-apis.jar
// indicando que no se empaqueten.
// internal method returning whether to take the given node's text content
private static final boolean hasTextContent(Node child) {
String nodeValue = null;
if (child != null) nodeValue = child.getNodeValue();
if (nodeValue != null) nodeValue = nodeValue.trim();
else nodeValue = ""; // BitCadena.CADENA_VACIA;
return child.getNodeType() != Node.COMMENT_NODE &&
child.getNodeType() != Node.PROCESSING_INSTRUCTION_NODE &&
(child.getNodeType() != Node.TEXT_NODE || Strings.isNullOrEmpty(nodeValue));
// !BitCadena.esCadenaVacia(nodeValue));
/*(child.getNodeType() != Node.TEXT_NODE ||
((TextImpl) child).isIgnorableWhitespace() == false);*/
}
    // Returns the text of a single child node. Xerces' DeferredTextImpl is
    // handled through reflection to avoid a compile-time dependency on the
    // parser implementation bundled with JBoss (see the class-level note
    // below); other node types use the standard DOM getTextContent().
    // Returns "" when the node has no contributing text or reflection fails.
    private static String getChildTextContent(Node child){
        if (child.getClass().getName().equals("org.apache.xerces.dom.DeferredTextImpl")){
            try {
                java.lang.reflect.Method m = child.getClass().getMethod("getTextContent", (Class[]) null);
                Object textContent = m.invoke(child, (Object[])null);
                return hasTextContent(child) ? textContent.toString() : "";
            } catch (Exception ex) {
                // Reflection failure: log and fall through to the empty result.
                Logger.getLogger(OpenXMLDocument.class.getName()).log(Level.SEVERE, null, ex);
            }
        }
        else return hasTextContent(child) ? child.getTextContent() : "";
        return "";
    }
/*
* Get Node text content
* @since DOM Level 3
*/
public static String getTextContent(Node node) throws DOMException {
//org.apache.xerces.dom.DeferredTextImpl var;
StringBuffer fBufferStr = null;
Node child = node.getFirstChild();
if (child != null) {
Node next = child.getNextSibling();
if (next == null) {
return hasTextContent(child) ? getChildTextContent(child) : "";
//return hasTextContent(child) ? ((NodeImpl) child).getTextContent() : "";
}
if (fBufferStr == null){
fBufferStr = new StringBuffer();
}
else {
fBufferStr.setLength(0);
}
getTextContent(fBufferStr, node);
return fBufferStr.toString();
}
return "";
}
// internal method taking a StringBuffer in parameter
private static void getTextContent(StringBuffer buf, Node node) throws DOMException {
Node child = node.getFirstChild();
while (child != null) {
if (hasTextContent(child)) {
getChildTextContent(child);
//((NodeImpl) child).getTextContent();
}
child = child.getNextSibling();
}
}
/*
* Set Node text content
* @since DOM Level 3
*/
public static void setTextContent(String textContent, Node node)
throws DOMException {
// get rid of any existing children
Node child;
while ((child = node.getFirstChild()) != null) {
node.removeChild(child);
}
// create a Text node to hold the given content
if (textContent != null && textContent.length() != 0){
node.appendChild(node.getOwnerDocument().createTextNode(textContent));
}
}
} | apache-2.0 |
google/agera | agera/src/main/java/com/google/android/agera/RepositoryCompiler.java | 13026 | /*
* Copyright 2015 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.agera;
import static com.google.android.agera.Common.NULL_OPERATOR;
import static com.google.android.agera.CompiledRepository.addBindWith;
import static com.google.android.agera.CompiledRepository.addCheck;
import static com.google.android.agera.CompiledRepository.addEnd;
import static com.google.android.agera.CompiledRepository.addFilterFailure;
import static com.google.android.agera.CompiledRepository.addFilterSuccess;
import static com.google.android.agera.CompiledRepository.addGetFrom;
import static com.google.android.agera.CompiledRepository.addGoLazy;
import static com.google.android.agera.CompiledRepository.addGoTo;
import static com.google.android.agera.CompiledRepository.addMergeIn;
import static com.google.android.agera.CompiledRepository.addSendTo;
import static com.google.android.agera.CompiledRepository.addTransform;
import static com.google.android.agera.CompiledRepository.compiledRepository;
import static com.google.android.agera.Functions.identityFunction;
import static com.google.android.agera.Mergers.objectsUnequal;
import static com.google.android.agera.Preconditions.checkNotNull;
import static com.google.android.agera.Preconditions.checkState;
import android.os.Looper;
import android.support.annotation.IntDef;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.util.ArrayList;
import java.util.concurrent.Executor;
@SuppressWarnings({"unchecked, rawtypes"})
final class RepositoryCompiler implements
RepositoryCompilerStates.RFrequency,
RepositoryCompilerStates.RFlow,
RepositoryCompilerStates.RTerminationOrContinue,
RepositoryCompilerStates.RConfig {
private static final ThreadLocal<RepositoryCompiler> compilers = new ThreadLocal<>();
@NonNull
static <TVal> RepositoryCompilerStates.REventSource<TVal, TVal> repositoryWithInitialValue(
@NonNull final TVal initialValue) {
checkNotNull(Looper.myLooper());
RepositoryCompiler compiler = compilers.get();
if (compiler == null) {
compiler = new RepositoryCompiler();
} else {
// Remove compiler from the ThreadLocal to prevent reuse in the middle of a compilation.
// recycle(), called by compile(), will return the compiler here. ThreadLocal.set(null) keeps
// the entry (with a null value) whereas remove() removes the entry; because we expect the
// return of the compiler, don't use the heavier remove().
compilers.set(null);
}
return compiler.start(initialValue);
}
private static void recycle(@NonNull final RepositoryCompiler compiler) {
compilers.set(compiler);
}
@Retention(RetentionPolicy.SOURCE)
@IntDef({NOTHING, FIRST_EVENT_SOURCE, FREQUENCY_OR_MORE_EVENT_SOURCE, FLOW,
TERMINATE_THEN_FLOW, TERMINATE_THEN_END, CONFIG})
private @interface Expect {}
private static final int NOTHING = 0;
private static final int FIRST_EVENT_SOURCE = 1;
private static final int FREQUENCY_OR_MORE_EVENT_SOURCE = 2;
private static final int FLOW = 3;
private static final int TERMINATE_THEN_FLOW = 4;
private static final int TERMINATE_THEN_END = 5;
private static final int CONFIG = 6;
private Object initialValue;
private final ArrayList<Observable> eventSources = new ArrayList<>();
private int frequency;
private final ArrayList<Object> directives = new ArrayList<>();
// 2x fields below: store caseExtractor and casePredicate for check(caseExtractor, casePredicate)
// for use in terminate(); if null then terminate() is terminating an attempt directive.
private Function caseExtractor;
private Predicate casePredicate;
private boolean goLazyUsed;
private Merger notifyChecker = objectsUnequal();
@RepositoryConfig
private int deactivationConfig;
@RepositoryConfig
private int concurrentUpdateConfig;
@NonNull
private Receiver discardedValueDisposer = NULL_OPERATOR;
@Expect
private int expect;
private RepositoryCompiler() {}
  /**
   * Begins a new compilation with the given initial value. The compiler must
   * be idle (state {@code NOTHING}); afterwards it expects the first event
   * source ({@code FIRST_EVENT_SOURCE}).
   */
  @NonNull
  private RepositoryCompiler start(@NonNull final Object initialValue) {
    checkExpect(NOTHING);
    expect = FIRST_EVENT_SOURCE;
    this.initialValue = initialValue;
    return this;
  }
private void checkExpect(@Expect final int accept) {
checkState(expect == accept, "Unexpected compiler state");
}
private void checkExpect(@Expect final int accept1, @Expect final int accept2) {
checkState(expect == accept1 || expect == accept2, "Unexpected compiler state");
}
private void checkGoLazyUnused() {
checkState(!goLazyUsed, "Unexpected occurrence of async directive after goLazy()");
}
//region REventSource
@NonNull
@Override
public RepositoryCompiler observe(@NonNull final Observable... observables) {
checkExpect(FIRST_EVENT_SOURCE, FREQUENCY_OR_MORE_EVENT_SOURCE);
for (Observable observable : observables) {
eventSources.add(checkNotNull(observable));
}
expect = FREQUENCY_OR_MORE_EVENT_SOURCE;
return this;
}
//endregion REventSource
//region RFrequency
@NonNull
@Override
public RepositoryCompiler onUpdatesPer(int millis) {
checkExpect(FREQUENCY_OR_MORE_EVENT_SOURCE);
frequency = Math.max(0, millis);
expect = FLOW;
return this;
}
@NonNull
@Override
public RepositoryCompiler onUpdatesPerLoop() {
return onUpdatesPer(0);
}
//endregion RFrequency
//region RSyncFlow
@NonNull
@Override
public RepositoryCompiler getFrom(@NonNull final Supplier supplier) {
checkExpect(FLOW);
addGetFrom(supplier, directives);
return this;
}
@NonNull
@Override
public RepositoryCompiler mergeIn(@NonNull final Supplier supplier,
@NonNull final Merger merger) {
checkExpect(FLOW);
addMergeIn(supplier, merger, directives);
return this;
}
@NonNull
@Override
public RepositoryCompiler transform(@NonNull final Function function) {
checkExpect(FLOW);
addTransform(function, directives);
return this;
}
@NonNull
@Override
public RepositoryCompiler check(@NonNull final Predicate predicate) {
return check(identityFunction(), predicate);
}
@NonNull
@Override
public RepositoryCompiler check(
@NonNull final Function function, @NonNull final Predicate predicate) {
checkExpect(FLOW);
caseExtractor = checkNotNull(function);
casePredicate = checkNotNull(predicate);
expect = TERMINATE_THEN_FLOW;
return this;
}
@NonNull
@Override
public RepositoryCompiler sendTo(@NonNull final Receiver receiver) {
checkExpect(FLOW);
addSendTo(checkNotNull(receiver), directives);
return this;
}
@NonNull
@Override
public RepositoryCompiler bindWith(@NonNull final Supplier secondValueSupplier,
@NonNull final Binder binder) {
checkExpect(FLOW);
addBindWith(secondValueSupplier, binder, directives);
return this;
}
@NonNull
@Override
public RepositoryCompiler thenSkip() {
endFlow(true);
return this;
}
@NonNull
@Override
public RepositoryCompiler thenGetFrom(@NonNull final Supplier supplier) {
getFrom(supplier);
endFlow(false);
return this;
}
@NonNull
@Override
public RepositoryCompiler thenMergeIn(
@NonNull final Supplier supplier, @NonNull final Merger merger) {
mergeIn(supplier, merger);
endFlow(false);
return this;
}
@NonNull
@Override
public RepositoryCompiler thenTransform(@NonNull final Function function) {
transform(function);
endFlow(false);
return this;
}
  /**
   * Appends the flow-terminating directive and advances the compiler to the
   * configuration state. {@code skip} is forwarded to {@code addEnd}; only
   * {@link #thenSkip()} passes {@code true}, all other then-methods pass
   * {@code false}.
   */
  private void endFlow(final boolean skip) {
    addEnd(skip, directives);
    expect = CONFIG;
  }
@NonNull
@Override
public RepositoryCompiler attemptGetFrom(@NonNull final Supplier attemptSupplier) {
getFrom(attemptSupplier);
expect = TERMINATE_THEN_FLOW;
return this;
}
@NonNull
@Override
public RepositoryCompiler attemptMergeIn(
@NonNull final Supplier supplier, @NonNull final Merger attemptMerger) {
mergeIn(supplier, attemptMerger);
expect = TERMINATE_THEN_FLOW;
return this;
}
@NonNull
@Override
public RepositoryCompiler attemptTransform(@NonNull final Function attemptFunction) {
transform(attemptFunction);
expect = TERMINATE_THEN_FLOW;
return this;
}
@NonNull
@Override
public RepositoryCompiler thenAttemptGetFrom(@NonNull final Supplier attemptSupplier) {
getFrom(attemptSupplier);
expect = TERMINATE_THEN_END;
return this;
}
@NonNull
@Override
public RepositoryCompiler thenAttemptMergeIn(
@NonNull final Supplier supplier, @NonNull final Merger attemptMerger) {
mergeIn(supplier, attemptMerger);
expect = TERMINATE_THEN_END;
return this;
}
@NonNull
@Override
public RepositoryCompiler thenAttemptTransform(@NonNull final Function attemptFunction) {
transform(attemptFunction);
expect = TERMINATE_THEN_END;
return this;
}
//endregion RSyncFlow
//region RFlow
@NonNull
@Override
public RepositoryCompiler goTo(@NonNull final Executor executor) {
checkExpect(FLOW);
checkGoLazyUnused();
addGoTo(executor, directives);
return this;
}
@NonNull
@Override
public RepositoryCompiler goLazy() {
checkExpect(FLOW);
checkGoLazyUnused();
addGoLazy(directives);
goLazyUsed = true;
return this;
}
//endregion RFlow
//region RTermination
@NonNull
@Override
public RepositoryCompiler orSkip() {
terminate(null);
return this;
}
@NonNull
@Override
public RepositoryCompiler orEnd(@NonNull final Function valueFunction) {
terminate(valueFunction);
return this;
}
  /**
   * Closes the pending termination clause. A non-null {@code valueFunction}
   * corresponds to {@code orEnd(...)}, null to {@code orSkip()}. When a
   * caseExtractor/casePredicate pair is pending (set by
   * {@code check(function, predicate)}) a check directive is added; otherwise
   * this terminates an attempt directive. If the clause belonged to a
   * then-variant ({@code TERMINATE_THEN_END}), the flow is ended as well.
   */
  private void terminate(@Nullable final Function valueFunction) {
    checkExpect(TERMINATE_THEN_FLOW, TERMINATE_THEN_END);
    if (caseExtractor != null) {
      addCheck(caseExtractor, checkNotNull(casePredicate), valueFunction, directives);
    } else {
      addFilterSuccess(valueFunction, directives);
    }
    // Clear the pending check state so the next check() starts fresh.
    caseExtractor = null;
    casePredicate = null;
    if (expect == TERMINATE_THEN_END) {
      endFlow(false);
    } else {
      expect = FLOW;
    }
  }
@NonNull
@Override
public RepositoryCompiler orContinue() {
checkExpect(TERMINATE_THEN_END);
addFilterFailure(directives);
expect = FLOW;
return this;
}
//endregion RTermination
//region RConfig
@NonNull
@Override
public RepositoryCompiler notifyIf(@NonNull final Merger notifyChecker) {
checkExpect(CONFIG);
this.notifyChecker = checkNotNull(notifyChecker);
return this;
}
@NonNull
@Override
public RepositoryCompiler onDeactivation(@RepositoryConfig final int deactivationConfig) {
checkExpect(CONFIG);
this.deactivationConfig = deactivationConfig;
return this;
}
@NonNull
@Override
public RepositoryCompiler onConcurrentUpdate(@RepositoryConfig final int concurrentUpdateConfig) {
checkExpect(CONFIG);
this.concurrentUpdateConfig = concurrentUpdateConfig;
return this;
}
@NonNull
@Override
public RepositoryCompiler sendDiscardedValuesTo(@NonNull final Receiver disposer) {
checkExpect(CONFIG);
discardedValueDisposer = checkNotNull(disposer);
return this;
}
  /**
   * Compiles the repository from the accumulated state, then returns this
   * fully reset compiler instance to the per-thread cache for reuse.
   */
  @NonNull
  @Override
  public Repository compile() {
    Repository repository = compileRepositoryAndReset();
    recycle(this);
    return repository;
  }
@NonNull
@Override
public RepositoryCompiler compileIntoRepositoryWithInitialValue(@NonNull final Object value) {
Repository repository = compileRepositoryAndReset();
// Don't recycle, instead sneak in the first directive and start the second repository
addGetFrom(repository, directives);
return start(value).observe(repository);
}
  /**
   * Builds the compiled repository from the accumulated state and resets every
   * field of this compiler back to its default so the instance can be reused
   * (see {@code recycle()} and {@code compileIntoRepositoryWithInitialValue}).
   * Must be called in the configuration state.
   */
  @NonNull
  private Repository compileRepositoryAndReset() {
    checkExpect(CONFIG);
    Repository repository = compiledRepository(initialValue, eventSources, frequency, directives,
        notifyChecker, concurrentUpdateConfig, deactivationConfig, discardedValueDisposer);
    // Reset all compiler state to defaults.
    expect = NOTHING;
    initialValue = null;
    eventSources.clear();
    frequency = 0;
    directives.clear();
    goLazyUsed = false;
    notifyChecker = objectsUnequal();
    deactivationConfig = RepositoryConfig.CONTINUE_FLOW;
    concurrentUpdateConfig = RepositoryConfig.CONTINUE_FLOW;
    discardedValueDisposer = NULL_OPERATOR;
    return repository;
  }
//endregion RConfig
}
| apache-2.0 |
Fabryprog/camel | components/camel-ftp/src/test/java/org/apache/camel/component/file/remote/FromFtpMoveFileToHiddenFolderRecursiveNotStepwiseTest.java | 1293 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.file.remote;
/**
* Unit test based on end user problem with SFTP on Windows
*/
public class FromFtpMoveFileToHiddenFolderRecursiveNotStepwiseTest extends FromFtpMoveFileToHiddenFolderRecursiveTest {
protected String getFtpUrl() {
return "ftp://admin@localhost:" + getPort() + "?password=admin&recursive=true&binary=false"
+ "&move=${file:parent}/.done/${file:onlyname}&initialDelay=3000&delay=5000&stepwise=false";
}
} | apache-2.0 |
abhijitvalluri/fitnotifications | icu4j/src/main/java/com/ibm/icu/impl/coll/CollationData.java | 21480 | // © 2016 and later: Unicode, Inc. and others.
// License & terms of use: http://www.unicode.org/copyright.html#License
/*
*******************************************************************************
* Copyright (C) 2010-2015, International Business Machines
* Corporation and others. All Rights Reserved.
*******************************************************************************
* CollationData.java, ported from collationdata.h/.cpp
*
* C++ version created on: 2010oct27
* created by: Markus W. Scherer
*/
package com.ibm.icu.impl.coll;
import com.ibm.icu.impl.Normalizer2Impl;
import com.ibm.icu.impl.Trie2_32;
import com.ibm.icu.lang.UScript;
import com.ibm.icu.text.Collator;
import com.ibm.icu.text.UnicodeSet;
import com.ibm.icu.util.ICUException;
/**
* Collation data container.
* Immutable data created by a CollationDataBuilder, or loaded from a file,
* or deserialized from API-provided binary data.
*
* Includes data for the collation base (root/default), aliased if this is not the base.
*/
public final class CollationData {
// Note: The ucadata.icu loader could discover the reserved ranges by setting an array
// parallel with the ranges, and resetting ranges that are indexed.
// The reordering builder code could clone the resulting template array.
static final int REORDER_RESERVED_BEFORE_LATIN = Collator.ReorderCodes.FIRST + 14;
static final int REORDER_RESERVED_AFTER_LATIN = Collator.ReorderCodes.FIRST + 15;
static final int MAX_NUM_SPECIAL_REORDER_CODES = 8;
CollationData(Normalizer2Impl nfc) {
nfcImpl = nfc;
}
public int getCE32(int c) {
return trie.get(c);
}
int getCE32FromSupplementary(int c) {
return trie.get(c); // TODO: port UTRIE2_GET32_FROM_SUPP(trie, c) to Java?
}
    /**
     * Returns true if c is a digit for numeric collation: either an ASCII
     * digit (U+0030..U+0039, tested directly for speed) or any character whose
     * CE32 carries the DIGIT_TAG. The 0x660 cut-off works because no
     * character below U+0660 other than ASCII digits has the DIGIT_TAG —
     * TODO confirm against the collation data.
     */
    boolean isDigit(int c) {
        return c < 0x660 ? c <= 0x39 && 0x30 <= c :
            Collation.hasCE32Tag(getCE32(c), Collation.DIGIT_TAG);
    }
public boolean isUnsafeBackward(int c, boolean numeric) {
return unsafeBackwardSet.contains(c) || (numeric && isDigit(c));
}
public boolean isCompressibleLeadByte(int b) {
return compressibleBytes[b];
}
public boolean isCompressiblePrimary(long p) {
return isCompressibleLeadByte((int)p >>> 24);
}
/**
* Returns the CE32 from two contexts words.
* Access to the defaultCE32 for contraction and prefix matching.
*/
int getCE32FromContexts(int index) {
return ((int)contexts.charAt(index) << 16) | contexts.charAt(index + 1);
}
    /**
     * Returns the CE32 that an indirect special CE32 (e.g., with DIGIT_TAG)
     * points to; any other special CE32 is returned unchanged.
     * Requires that ce32 is special.
     */
    int getIndirectCE32(int ce32) {
        assert(Collation.isSpecialCE32(ce32));
        int tag = Collation.tagFromCE32(ce32);
        if(tag == Collation.DIGIT_TAG) {
            // Fetch the non-numeric-collation CE32.
            ce32 = ce32s[Collation.indexFromCE32(ce32)];
        } else if(tag == Collation.LEAD_SURROGATE_TAG) {
            // A lone lead surrogate maps to the unassigned sentinel.
            ce32 = Collation.UNASSIGNED_CE32;
        } else if(tag == Collation.U0000_TAG) {
            // Fetch the normal ce32 for U+0000.
            ce32 = ce32s[0];
        }
        return ce32;
    }
    /**
     * Returns the CE32 for an indirect special CE32 (e.g., with DIGIT_TAG),
     * if ce32 is special; a non-special ce32 is returned as-is.
     */
    int getFinalCE32(int ce32) {
        if(Collation.isSpecialCE32(ce32)) {
            ce32 = getIndirectCE32(ce32);
        }
        return ce32;
    }
/**
* Computes a CE from c's ce32 which has the OFFSET_TAG.
*/
long getCEFromOffsetCE32(int c, int ce32) {
long dataCE = ces[Collation.indexFromCE32(ce32)];
return Collation.makeCE(Collation.getThreeBytePrimaryForOffsetData(c, dataCE));
}
    /**
     * Returns the single CE that c maps to, resolving fallback to the base
     * data and following indirect/expansion-of-length-one CE32s.
     * Throws UnsupportedOperationException if c does not map to exactly one CE
     * (contractions, prefixes, multi-CE expansions, Hangul, ...).
     */
    long getSingleCE(int c) {
        CollationData d;
        int ce32 = getCE32(c);
        if(ce32 == Collation.FALLBACK_CE32) {
            // Not mapped in this tailoring: resolve against the base data.
            d = base;
            ce32 = base.getCE32(c);
        } else {
            d = this;
        }
        while(Collation.isSpecialCE32(ce32)) {
            switch(Collation.tagFromCE32(ce32)) {
            // All of these tags imply (potentially) more than one CE.
            case Collation.LATIN_EXPANSION_TAG:
            case Collation.BUILDER_DATA_TAG:
            case Collation.PREFIX_TAG:
            case Collation.CONTRACTION_TAG:
            case Collation.HANGUL_TAG:
            case Collation.LEAD_SURROGATE_TAG:
                throw new UnsupportedOperationException(String.format(
                        "there is not exactly one collation element for U+%04X (CE32 0x%08x)",
                        c, ce32));
            case Collation.FALLBACK_TAG:
            case Collation.RESERVED_TAG_3:
                // Fallback was already resolved above; these must not occur here.
                throw new AssertionError(String.format(
                        "unexpected CE32 tag for U+%04X (CE32 0x%08x)", c, ce32));
            case Collation.LONG_PRIMARY_TAG:
                return Collation.ceFromLongPrimaryCE32(ce32);
            case Collation.LONG_SECONDARY_TAG:
                return Collation.ceFromLongSecondaryCE32(ce32);
            case Collation.EXPANSION32_TAG:
                if(Collation.lengthFromCE32(ce32) == 1) {
                    // A length-1 expansion still yields a single CE; unwrap it.
                    ce32 = d.ce32s[Collation.indexFromCE32(ce32)];
                    break;
                } else {
                    throw new UnsupportedOperationException(String.format(
                            "there is not exactly one collation element for U+%04X (CE32 0x%08x)",
                            c, ce32));
                }
            case Collation.EXPANSION_TAG: {
                if(Collation.lengthFromCE32(ce32) == 1) {
                    return d.ces[Collation.indexFromCE32(ce32)];
                } else {
                    throw new UnsupportedOperationException(String.format(
                            "there is not exactly one collation element for U+%04X (CE32 0x%08x)",
                            c, ce32));
                }
            }
            case Collation.DIGIT_TAG:
                // Fetch the non-numeric-collation CE32 and continue.
                ce32 = d.ce32s[Collation.indexFromCE32(ce32)];
                break;
            case Collation.U0000_TAG:
                assert(c == 0);
                // Fetch the normal ce32 for U+0000 and continue.
                ce32 = d.ce32s[0];
                break;
            case Collation.OFFSET_TAG:
                return d.getCEFromOffsetCE32(c, ce32);
            case Collation.IMPLICIT_TAG:
                return Collation.unassignedCEFromCodePoint(c);
            }
        }
        return Collation.ceFromSimpleCE32(ce32);
    }
/**
 * Returns the FCD16 value for code point c. c must be >= 0.
 * Delegates to the normalizer implementation supplied at construction.
 */
int getFCD16(int c) {
    return nfcImpl.getFCD16(c);
}
/**
 * Returns the first primary for the script's reordering group.
 * @return the primary with only the first primary lead byte of the group
 *         (not necessarily an actual root collator primary weight),
 *         or 0 if the script is unknown
 */
long getFirstPrimaryForGroup(int script) {
    int index = getScriptIndex(script);
    if (index == 0) {
        return 0;  // unknown script
    }
    return (long)scriptStarts[index] << 16;
}
/**
 * Returns the last primary for the script's reordering group.
 * @return the last primary of the group
 *         (not an actual root collator primary weight),
 *         or 0 if the script is unknown
 */
public long getLastPrimaryForGroup(int script) {
    int index = getScriptIndex(script);
    if(index != 0) {
        // One less than the start of the following range.
        long limit = scriptStarts[index + 1];
        return (limit << 16) - 1;
    }
    return 0;
}
/**
 * Finds the reordering group which contains the primary weight.
 * @return the first script of the group, or -1 if the weight is beyond the last group
 */
public int getGroupForPrimary(long p) {
    // Compare only the top 16 bits, which is what scriptStarts stores.
    p >>= 16;
    if(p < scriptStarts[1] || scriptStarts[scriptStarts.length - 1] <= p) {
        // Below the first reorderable range, or in/above the trailing range.
        return -1;
    }
    // Linear search for the range that contains p.
    int index = 1;
    while(p >= scriptStarts[index + 1]) { ++index; }
    // Prefer a real script code that maps to this range...
    for(int i = 0; i < numScripts; ++i) {
        if(scriptsIndex[i] == index) {
            return i;
        }
    }
    // ...otherwise return the special reorder code for this range.
    for(int i = 0; i < MAX_NUM_SPECIAL_REORDER_CODES; ++i) {
        if(scriptsIndex[numScripts + i] == index) {
            return Collator.ReorderCodes.FIRST + i;
        }
    }
    return -1;
}
/**
 * Maps a UScript code or special reorder code to its scriptStarts index,
 * or 0 if the code is out of range / has no range.
 */
private int getScriptIndex(int script) {
    if (0 <= script && script < numScripts) {
        return scriptsIndex[script];
    }
    // Special reorder codes are stored after the per-script entries.
    int special = script - Collator.ReorderCodes.FIRST;
    if (0 <= special && special < MAX_NUM_SPECIAL_REORDER_CODES) {
        return scriptsIndex[numScripts + special];
    }
    return 0;
}
/**
 * Returns all script codes that share the given script's reordering range
 * (for example Hira &amp; Kana), or an empty array if the script is unknown.
 */
public int[] getEquivalentScripts(int script) {
    int index = getScriptIndex(script);
    if(index == 0) {
        return EMPTY_INT_ARRAY;
    }
    if(script >= Collator.ReorderCodes.FIRST) {
        // Special groups have no aliases.
        return new int[] { script };
    }
    // First pass: count scripts that map to the same range.
    int count = 0;
    for(int i = 0; i < numScripts; ++i) {
        if(scriptsIndex[i] == index) {
            ++count;
        }
    }
    int[] result = new int[count];
    if(count == 1) {
        result[0] = script;
    } else {
        // Second pass: collect the equivalent script codes.
        int n = 0;
        for(int i = 0; i < numScripts; ++i) {
            if(scriptsIndex[i] == index) {
                result[n++] = i;
            }
        }
    }
    return result;
}
/**
 * Writes the permutation of primary-weight ranges
 * for the given reordering of scripts and groups.
 * The caller checks for illegal arguments and
 * takes care of [DEFAULT] and memory allocation.
 *
 * <p>Each list element will be a (limit, offset) pair as described
 * for the CollationSettings.reorderRanges.
 * The list will be empty if no ranges are reordered.
 *
 * @param reorder the requested script/reorder-code sequence
 * @param ranges  output vector of encoded (limit, offset) pairs
 */
void makeReorderRanges(int[] reorder, UVector32 ranges) {
    // Start with latinMustMove=false; the worker retries with true if needed.
    makeReorderRanges(reorder, false, ranges);
}
/**
 * Worker for {@link #makeReorderRanges(int[], UVector32)}.
 * Assigns new lead bytes to each script range per the requested order and
 * encodes the result as (limit, offset) pairs in {@code ranges}.
 *
 * @param latinMustMove if true, do not leave Latin in place even when it is
 *        the first requested script (used on retry when lead bytes ran out)
 */
private void makeReorderRanges(int[] reorder, boolean latinMustMove, UVector32 ranges) {
    ranges.removeAllElements();
    int length = reorder.length;
    if(length == 0 || (length == 1 && reorder[0] == UScript.UNKNOWN)) {
        return;
    }
    // Maps each script-or-group range to a new lead byte.
    short[] table = new short[scriptStarts.length - 1];  // C++: uint8_t[]
    {
        // Set "don't care" values for reserved ranges.
        int index = scriptsIndex[
                numScripts + REORDER_RESERVED_BEFORE_LATIN - Collator.ReorderCodes.FIRST];
        if(index != 0) {
            table[index] = 0xff;
        }
        index = scriptsIndex[
                numScripts + REORDER_RESERVED_AFTER_LATIN - Collator.ReorderCodes.FIRST];
        if(index != 0) {
            table[index] = 0xff;
        }
    }
    // Never reorder special low and high primary lead bytes.
    assert(scriptStarts.length >= 2);
    assert(scriptStarts[0] == 0);
    int lowStart = scriptStarts[1];
    assert(lowStart == ((Collation.MERGE_SEPARATOR_BYTE + 1) << 8));
    int highLimit = scriptStarts[scriptStarts.length - 1];
    assert(highLimit == (Collation.TRAIL_WEIGHT_BYTE << 8));
    // Get the set of special reorder codes in the input list.
    // This supports a fixed number of special reorder codes;
    // it works for data with codes beyond Collator.ReorderCodes.LIMIT.
    int specials = 0;
    for(int i = 0; i < length; ++i) {
        int reorderCode = reorder[i] - Collator.ReorderCodes.FIRST;
        if(0 <= reorderCode && reorderCode < MAX_NUM_SPECIAL_REORDER_CODES) {
            specials |= 1 << reorderCode;
        }
    }
    // Start the reordering with the special low reorder codes that do not occur in the input.
    for(int i = 0; i < MAX_NUM_SPECIAL_REORDER_CODES; ++i) {
        int index = scriptsIndex[numScripts + i];
        if(index != 0 && (specials & (1 << i)) == 0) {
            lowStart = addLowScriptRange(table, index, lowStart);
        }
    }
    // Skip the reserved range before Latin if Latin is the first script,
    // so that we do not move it unnecessarily.
    int skippedReserved = 0;
    if(specials == 0 && reorder[0] == UScript.LATIN && !latinMustMove) {
        int index = scriptsIndex[UScript.LATIN];
        assert(index != 0);
        int start = scriptStarts[index];
        assert(lowStart <= start);
        skippedReserved = start - lowStart;
        lowStart = start;
    }
    // Reorder according to the input scripts, continuing from the bottom of the primary range.
    boolean hasReorderToEnd = false;
    for(int i = 0; i < length;) {
        int script = reorder[i++];
        if(script == UScript.UNKNOWN) {
            // Put the remaining scripts at the top.
            // Consume the rest of the list from the back, assigning from highLimit down.
            hasReorderToEnd = true;
            while(i < length) {
                script = reorder[--length];
                if(script == UScript.UNKNOWN) {  // Must occur at most once.
                    throw new IllegalArgumentException(
                            "setReorderCodes(): duplicate UScript.UNKNOWN");
                }
                if(script == Collator.ReorderCodes.DEFAULT) {
                    throw new IllegalArgumentException(
                            "setReorderCodes(): UScript.DEFAULT together with other scripts");
                }
                int index = getScriptIndex(script);
                if(index == 0) { continue; }
                if(table[index] != 0) {  // Duplicate or equivalent script.
                    throw new IllegalArgumentException(
                            "setReorderCodes(): duplicate or equivalent script " +
                            scriptCodeString(script));
                }
                highLimit = addHighScriptRange(table, index, highLimit);
            }
            break;
        }
        if(script == Collator.ReorderCodes.DEFAULT) {
            // The default code must be the only one in the list, and that is handled by the caller.
            // Otherwise it must not be used.
            throw new IllegalArgumentException(
                    "setReorderCodes(): UScript.DEFAULT together with other scripts");
        }
        int index = getScriptIndex(script);
        if(index == 0) { continue; }
        if(table[index] != 0) {  // Duplicate or equivalent script.
            throw new IllegalArgumentException(
                    "setReorderCodes(): duplicate or equivalent script " +
                    scriptCodeString(script));
        }
        lowStart = addLowScriptRange(table, index, lowStart);
    }
    // Put all remaining scripts into the middle.
    for(int i = 1; i < scriptStarts.length - 1; ++i) {
        int leadByte = table[i];
        if(leadByte != 0) { continue; }
        int start = scriptStarts[i];
        if(!hasReorderToEnd && start > lowStart) {
            // No need to move this script.
            lowStart = start;
        }
        lowStart = addLowScriptRange(table, i, lowStart);
    }
    if(lowStart > highLimit) {
        // Ran out of lead bytes between the low- and high-assigned ranges.
        if((lowStart - (skippedReserved & 0xff00)) <= highLimit) {
            // Try not skipping the before-Latin reserved range.
            makeReorderRanges(reorder, true, ranges);
            return;
        }
        // We need more primary lead bytes than available, despite the reserved ranges.
        throw new ICUException(
                "setReorderCodes(): reordering too many partial-primary-lead-byte scripts");
    }
    // Turn lead bytes into a list of (limit, offset) pairs.
    // Encode each pair in one list element:
    // Upper 16 bits = limit, lower 16 = signed lead byte offset.
    int offset = 0;
    for(int i = 1;; ++i) {
        int nextOffset = offset;
        while(i < scriptStarts.length - 1) {
            int newLeadByte = table[i];
            if(newLeadByte == 0xff) {
                // "Don't care" lead byte for reserved range, continue with current offset.
            } else {
                nextOffset = newLeadByte - (scriptStarts[i] >> 8);
                if(nextOffset != offset) { break; }
            }
            ++i;
        }
        if(offset != 0 || i < scriptStarts.length - 1) {
            ranges.addElement(((int)scriptStarts[i] << 16) | (offset & 0xffff));
        }
        if(i == scriptStarts.length - 1) { break; }
        offset = nextOffset;
    }
}
/**
 * Assigns the next low lead byte(s) to the range at scriptStarts index,
 * recording the new lead byte in table[index].
 * @return the updated lowStart, positioned just after the moved range
 */
private int addLowScriptRange(short[] table, int index, int lowStart) {
    int start = scriptStarts[index];
    if((start & 0xff) < (lowStart & 0xff)) {
        // The range starts lower within its lead byte than our current
        // position: it must begin on the next lead byte.
        lowStart += 0x100;
    }
    table[index] = (short)(lowStart >> 8);
    int limit = scriptStarts[index + 1];
    // Advance by the number of lead bytes the range spans, keeping the
    // range-limit's low byte as the new within-lead-byte position.
    lowStart = ((lowStart & 0xff00) + ((limit & 0xff00) - (start & 0xff00))) | (limit & 0xff);
    return lowStart;
}
/**
 * Mirror of addLowScriptRange: assigns the next high lead byte(s), working
 * downward from highLimit, to the range at scriptStarts index and records
 * the new lead byte in table[index].
 * @return the updated highLimit, positioned just below the moved range
 */
private int addHighScriptRange(short[] table, int index, int highLimit) {
    int limit = scriptStarts[index + 1];
    if((limit & 0xff) > (highLimit & 0xff)) {
        // The range ends higher within its lead byte than our current
        // position allows: it must end in the previous lead byte.
        highLimit -= 0x100;
    }
    int start = scriptStarts[index];
    // Step down by the number of lead bytes the range spans, keeping the
    // range-start's low byte as the new within-lead-byte position.
    highLimit = ((highLimit & 0xff00) - ((limit & 0xff00) - (start & 0xff00))) | (start & 0xff);
    table[index] = (short)(highLimit >> 8);
    return highLimit;
}
/** Formats a script/reorder code for an error message. */
private static String scriptCodeString(int script) {
    // Do not use the script name here: We do not want to depend on that data.
    if (script < Collator.ReorderCodes.FIRST) {
        return Integer.toString(script);
    }
    return "0x" + Integer.toHexString(script);
}
/** Shared empty result for getEquivalentScripts(). */
private static final int[] EMPTY_INT_ARRAY = new int[0];
/** @see jamoCE32s */
static final int JAMO_CE32S_LENGTH = 19 + 21 + 27;
/** Main lookup trie. */
Trie2_32 trie;
/**
 * Array of CE32 values.
 * At index 0 there must be CE32(U+0000)
 * to support U+0000's special-tag for NUL-termination handling.
 */
int[] ce32s;
/** Array of CE values for expansions and OFFSET_TAG. */
long[] ces;
/** Array of prefix and contraction-suffix matching data. */
String contexts;
/** Base collation data, or null if this data itself is a base. */
public CollationData base;
/**
 * Simple array of JAMO_CE32S_LENGTH=19+21+27 CE32s, one per canonical Jamo L/V/T.
 * They are normally simple CE32s, rarely expansions.
 * For fast handling of HANGUL_TAG.
 */
int[] jamoCE32s = new int[JAMO_CE32S_LENGTH];
/** Normalizer implementation used for FCD16 lookups; set by the constructor. */
public Normalizer2Impl nfcImpl;
/** The single-byte primary weight (xx000000) for numeric collation. */
long numericPrimary = 0x12000000;
/** 256 flags for which primary-weight lead bytes are compressible. */
public boolean[] compressibleBytes;
/**
 * Set of code points that are unsafe for starting string comparison after an identical prefix,
 * or in backwards CE iteration.
 */
UnicodeSet unsafeBackwardSet;
/**
 * Fast Latin table for common-Latin-text string comparisons.
 * Data structure see class CollationFastLatin.
 */
public char[] fastLatinTable;
/**
 * Header portion of the fastLatinTable.
 * In C++, these are one array, and the header is skipped for mapping characters.
 * In Java, two arrays work better.
 */
char[] fastLatinTableHeader;
/**
 * Data for scripts and reordering groups.
 * Uses include building a reordering permutation table and
 * providing script boundaries to AlphabeticIndex.
 */
int numScripts;
/**
 * The length of scriptsIndex is numScripts+16.
 * It maps from a UScriptCode or a special reorder code to an entry in scriptStarts.
 * 16 special reorder codes (not all used) are mapped starting at numScripts.
 * Up to MAX_NUM_SPECIAL_REORDER_CODES are codes for special groups like space/punct/digit.
 * There are special codes at the end for reorder-reserved primary ranges.
 *
 * <p>Multiple scripts may share a range and index, for example Hira &amp; Kana.
 */
char[] scriptsIndex;
/**
 * Start primary weight (top 16 bits only) for a group/script/reserved range
 * indexed by scriptsIndex.
 * The first range (separators &amp; terminators) and the last range (trailing weights)
 * are not reorderable, and no scriptsIndex entry points to them.
 */
char[] scriptStarts;
/**
 * Collation elements in the root collator.
 * Used by the CollationRootElements class. The data structure is described there.
 * null in a tailoring.
 */
public long[] rootElements;
}
| apache-2.0 |
apache/sis | core/sis-metadata/src/test/java/org/apache/sis/internal/jaxb/gml/TimePeriodTest.java | 10654 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sis.internal.jaxb.gml;
import java.util.Map;
import java.util.HashMap;
import java.util.Locale;
import javax.xml.bind.Marshaller;
import javax.xml.bind.Unmarshaller;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.datatype.DatatypeConfigurationException;
import org.apache.sis.xml.XML;
import org.apache.sis.xml.Namespaces;
import org.apache.sis.xml.MarshallerPool;
import org.apache.sis.internal.xml.XmlUtilities;
import org.apache.sis.internal.temporal.DefaultTemporalFactory;
import org.apache.sis.test.xml.TestCase;
import org.junit.BeforeClass;
import org.junit.AfterClass;
import org.junit.Test;
import static org.apache.sis.test.MetadataAssert.*;
import static org.apache.sis.test.TestUtilities.date;
import static org.apache.sis.test.TestUtilities.format;
import org.apache.sis.internal.geoapi.temporal.Instant;
/**
* Tests the {@link TimePeriod} class. The XML fragments used in this test cases are derived from
* <a href="http://toyoda-eizi.blogspot.fr/2011/02/examples-of-gml-fragment-in-iso.html">here</a>.
*
* @author Martin Desruisseaux (Geomatys)
* @version 1.2
* @since 0.3
* @module
*/
public final strictfp class TimePeriodTest extends TestCase {
    /**
     * A poll of configured {@link Marshaller} and {@link Unmarshaller}, created when first needed.
     */
    private static MarshallerPool pool;
    /**
     * Set the marshalling context to a fixed locale and timezone before to create the
     * JAXB wrappers for temporal objects.
     */
    private void createContext() {
        createContext(true, Locale.FRANCE, "CET");
    }
    /**
     * Creates a GeoAPI instant object for the given date.
     */
    private static Instant instant(final String date) {
        return DefaultTemporalFactory.INSTANCE.createInstant(date(date));
    }
    /**
     * Creates the XML (un)marshaller pool to be shared by all test methods.
     * The (un)marshallers locale and timezone will be set to fixed values.
     *
     * @throws JAXBException if an error occurred while creating the pool.
     *
     * @see #disposeMarshallerPool()
     */
    @BeforeClass
    public static void createMarshallerPool() throws JAXBException {
        final Map<String,Object> properties = new HashMap<>(4);
        // assertNull verifies that no property was silently overwritten.
        assertNull(properties.put(XML.LOCALE, Locale.FRANCE));
        assertNull(properties.put(XML.TIMEZONE, "CET"));
        pool = new MarshallerPool(JAXBContext.newInstance(TimeInstant.class, TimePeriod.class), properties);
    }
    /**
     * Invoked by JUnit after the execution of every tests in order to dispose
     * the {@link MarshallerPool} instance used internally by this class.
     */
    @AfterClass
    public static void disposeMarshallerPool() {
        pool = null;
    }
    /**
     * Tests time instant. The test is executed using an arbitrary locale and timezone.
     *
     * @throws JAXBException if an error occurred while marshalling.
     * @throws DatatypeConfigurationException should never happen.
     */
    @Test
    public void testTimeInstant() throws JAXBException, DatatypeConfigurationException {
        createContext();
        final Marshaller marshaller = pool.acquireMarshaller();
        final Unmarshaller unmarshaller = pool.acquireUnmarshaller();
        final TimeInstant instant = new TimeInstant();
        instant.timePosition = XmlUtilities.toXML(context, date("1992-01-01 00:00:00"));
        final String actual = marshal(marshaller, instant);
        assertXmlEquals(
                "<gml:TimeInstant xmlns:gml=\"" + Namespaces.GML + "\">\n" +
                "  <gml:timePosition>1992-01-01T01:00:00.000+01:00</gml:timePosition>\n" +
                "</gml:TimeInstant>\n", actual, "xmlns:*");
        // Round-trip: unmarshalling shall restore the original UTC instant.
        final TimeInstant test = (TimeInstant) unmarshal(unmarshaller, actual);
        assertEquals("1992-01-01 00:00:00", format(XmlUtilities.toDate(context, test.timePosition)));
        pool.recycle(marshaller);
        pool.recycle(unmarshaller);
    }
    /**
     * Tests a time period using the GML 2 syntax.
     * The test is executed using an arbitrary locale and timezone.
     *
     * @throws JAXBException if an error occurred while marshalling.
     */
    @Test
    public void testPeriodGML2() throws JAXBException {
        createContext();
        final TimePeriodBound begin = new TimePeriodBound.GML2(instant("1992-01-01 00:00:00"));
        final TimePeriodBound end = new TimePeriodBound.GML2(instant("2007-12-31 00:00:00"));
        testPeriod(begin, end,
                "<gml:TimePeriod xmlns:gml=\"" + Namespaces.GML + "\">\n" +
                "  <gml:begin>\n" +
                "    <gml:TimeInstant>\n" +
                "      <gml:timePosition>1992-01-01T01:00:00+01:00</gml:timePosition>\n" +
                "    </gml:TimeInstant>\n" +
                "  </gml:begin>\n" +
                "  <gml:end>\n" +
                "    <gml:TimeInstant>\n" +
                "      <gml:timePosition>2007-12-31T01:00:00+01:00</gml:timePosition>\n" +
                "    </gml:TimeInstant>\n" +
                "  </gml:end>\n" +
                "</gml:TimePeriod>\n", true);
    }
    /**
     * Tests a time period using GML2 or GML3 syntax. This method is used for the
     * implementation of {@link #testPeriodGML2()} and {@link #testPeriodGML3()}.
     * The test is executed using an arbitrary locale and timezone.
     *
     * @param begin the lower bound of the period to marshal.
     * @param end the upper bound of the period to marshal.
     * @param expected the expected string.
     * @param verifyValues whether to also verify the dates parsed back by unmarshalling.
     * @throws JAXBException if an error occurred while (un)marshalling.
     */
    private void testPeriod(final TimePeriodBound begin, final TimePeriodBound end,
            final String expected, final boolean verifyValues) throws JAXBException
    {
        final Marshaller marshaller = pool.acquireMarshaller();
        final Unmarshaller unmarshaller = pool.acquireUnmarshaller();
        final TimePeriod period = new TimePeriod();
        period.begin = begin;
        period.end = end;
        final String actual = marshal(marshaller, period);
        assertXmlEquals(expected, actual, "xmlns:*");
        final TimePeriod test = (TimePeriod) unmarshal(unmarshaller, actual);
        if (verifyValues) {
            assertEquals("1992-01-01 00:00:00", format(XmlUtilities.toDate(context, test.begin.calendar())));
            assertEquals("2007-12-31 00:00:00", format(XmlUtilities.toDate(context, test.end  .calendar())));
        }
        pool.recycle(marshaller);
        pool.recycle(unmarshaller);
    }
    /**
     * Tests a time period using the GML 3 syntax.
     * The test is executed using an arbitrary locale and timezone.
     *
     * @throws JAXBException if an error occurred while marshalling.
     */
    @Test
    public void testPeriodGML3() throws JAXBException {
        createContext();
        final TimePeriodBound begin = new TimePeriodBound.GML3(instant("1992-01-01 00:00:00"), "before");
        final TimePeriodBound end = new TimePeriodBound.GML3(instant("2007-12-31 00:00:00"), "after");
        testPeriod(begin, end,
                "<gml:TimePeriod xmlns:gml=\"" + Namespaces.GML + "\">\n" +
                "  <gml:beginPosition>1992-01-01T01:00:00+01:00</gml:beginPosition>\n" +
                "  <gml:endPosition>2007-12-31T01:00:00+01:00</gml:endPosition>\n" +
                "</gml:TimePeriod>\n", true);
    }
    /**
     * Same test than {@link #testPeriodGML3()}, but with simplified date format (omit the hours and timezone)
     * The test is executed using an arbitrary locale and timezone.
     *
     * @throws JAXBException if an error occurred while marshalling.
     */
    @Test
    public void testSimplifiedPeriodGML3() throws JAXBException {
        createContext();
        final TimePeriodBound begin = new TimePeriodBound.GML3(instant("1992-01-01 23:00:00"), "before");
        final TimePeriodBound end = new TimePeriodBound.GML3(instant("2007-12-30 23:00:00"), "after");
        testPeriod(begin, end,
                "<gml:TimePeriod xmlns:gml=\"" + Namespaces.GML + "\">\n" +
                "  <gml:beginPosition>1992-01-02</gml:beginPosition>\n" +
                "  <gml:endPosition>2007-12-31</gml:endPosition>\n" +
                "</gml:TimePeriod>\n", false);
    }
    /**
     * Same test than {@link #testSimplifiedPeriodGML3()}, but without beginning boundary.
     * The test is executed using an arbitrary locale and timezone.
     *
     * @throws JAXBException if an error occurred while marshalling.
     */
    @Test
    public void testBeforePeriodGML3() throws JAXBException {
        createContext();
        final TimePeriodBound begin = new TimePeriodBound.GML3(null, "before");
        final TimePeriodBound end = new TimePeriodBound.GML3(instant("2007-12-30 23:00:00"), "after");
        testPeriod(begin, end,
                "<gml:TimePeriod xmlns:gml=\"" + Namespaces.GML + "\">\n" +
                "  <gml:beginPosition indeterminatePosition=\"before\"/>\n" +
                "  <gml:endPosition>2007-12-31</gml:endPosition>\n" +
                "</gml:TimePeriod>\n", false);
    }
    /**
     * Same test than {@link #testSimplifiedPeriodGML3()}, but without end boundary.
     * The test is executed using an arbitrary locale and timezone.
     *
     * @throws JAXBException if an error occurred while marshalling.
     */
    @Test
    public void testAfterPeriodGML3() throws JAXBException {
        createContext();
        final TimePeriodBound begin = new TimePeriodBound.GML3(instant("1992-01-01 23:00:00"), "before");
        final TimePeriodBound end = new TimePeriodBound.GML3(null, "after");
        testPeriod(begin, end,
                "<gml:TimePeriod xmlns:gml=\"" + Namespaces.GML + "\">\n" +
                "  <gml:beginPosition>1992-01-02</gml:beginPosition>\n" +
                "  <gml:endPosition indeterminatePosition=\"after\"/>\n" +
                "</gml:TimePeriod>\n", false);
    }
}
| apache-2.0 |
Mogztter/jinjava | src/main/java/com/hubspot/jinjava/loader/CascadingResourceLocator.java | 825 | package com.hubspot.jinjava.loader;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.Arrays;
import com.hubspot.jinjava.interpret.JinjavaInterpreter;
/**
 * A {@link ResourceLocator} which delegates to a list of other locators,
 * trying each in order and returning the first successful lookup.
 */
public class CascadingResourceLocator implements ResourceLocator {

  /** Delegate locators, tried first to last. Immutable after construction. */
  private final Iterable<ResourceLocator> locators;

  /**
   * Creates a locator which cascades over the given delegates.
   *
   * @param locators the delegate locators, tried in the order given
   */
  public CascadingResourceLocator(ResourceLocator... locators) {
    this.locators = Arrays.asList(locators);
  }

  /**
   * Returns the resource content from the first delegate that can find it.
   *
   * @throws ResourceNotFoundException if no delegate can locate the resource
   * @throws IOException if a delegate fails while reading the resource
   */
  @Override
  public String getString(String fullName, Charset encoding,
      JinjavaInterpreter interpreter) throws IOException {
    for (ResourceLocator locator : locators) {
      try {
        return locator.getString(fullName, encoding, interpreter);
      } catch (ResourceNotFoundException ignored) {
        // Expected when this delegate lacks the resource; fall through to the next one.
      }
    }
    throw new ResourceNotFoundException("Couldn't find resource: " + fullName);
  }
}
| apache-2.0 |
mehdi149/OF_COMPILER_0.1 | gen-src/main/java/org/projectfloodlight/openflow/protocol/ver14/OFBsnGetInterfacesReplyVer14.java | 12333 | // Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution
// Automatically generated by LOXI from template of_class.java
// Do not modify
package org.projectfloodlight.openflow.protocol.ver14;
import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.stat.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.oxs.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
import com.google.common.collect.ImmutableList;
import java.util.Set;
import io.netty.buffer.ByteBuf;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;
class OFBsnGetInterfacesReplyVer14 implements OFBsnGetInterfacesReply {
private static final Logger logger = LoggerFactory.getLogger(OFBsnGetInterfacesReplyVer14.class);
// version: 1.4
final static byte WIRE_VERSION = 5;
final static int MINIMUM_LENGTH = 16;
private final static long DEFAULT_XID = 0x0L;
private final static List<OFBsnInterface> DEFAULT_INTERFACES = ImmutableList.<OFBsnInterface>of();
// OF message fields
private final long xid;
private final List<OFBsnInterface> interfaces;
//
// Immutable default instance
final static OFBsnGetInterfacesReplyVer14 DEFAULT = new OFBsnGetInterfacesReplyVer14(
DEFAULT_XID, DEFAULT_INTERFACES
);
// package private constructor - used by readers, builders, and factory
OFBsnGetInterfacesReplyVer14(long xid, List<OFBsnInterface> interfaces) {
if(interfaces == null) {
throw new NullPointerException("OFBsnGetInterfacesReplyVer14: property interfaces cannot be null");
}
this.xid = xid;
this.interfaces = interfaces;
}
// Accessors for OF message fields
@Override
public OFVersion getVersion() {
return OFVersion.OF_14;
}
@Override
public OFType getType() {
return OFType.EXPERIMENTER;
}
@Override
public long getXid() {
return xid;
}
@Override
public long getExperimenter() {
return 0x5c16c7L;
}
@Override
public long getSubtype() {
return 0xaL;
}
@Override
public List<OFBsnInterface> getInterfaces() {
return interfaces;
}
public OFBsnGetInterfacesReply.Builder createBuilder() {
return new BuilderWithParent(this);
}
static class BuilderWithParent implements OFBsnGetInterfacesReply.Builder {
final OFBsnGetInterfacesReplyVer14 parentMessage;
// OF message fields
private boolean xidSet;
private long xid;
private boolean interfacesSet;
private List<OFBsnInterface> interfaces;
BuilderWithParent(OFBsnGetInterfacesReplyVer14 parentMessage) {
this.parentMessage = parentMessage;
}
@Override
public OFVersion getVersion() {
return OFVersion.OF_14;
}
@Override
public OFType getType() {
return OFType.EXPERIMENTER;
}
@Override
public long getXid() {
return xid;
}
@Override
public OFBsnGetInterfacesReply.Builder setXid(long xid) {
this.xid = xid;
this.xidSet = true;
return this;
}
@Override
public long getExperimenter() {
return 0x5c16c7L;
}
@Override
public long getSubtype() {
return 0xaL;
}
@Override
public List<OFBsnInterface> getInterfaces() {
return interfaces;
}
@Override
public OFBsnGetInterfacesReply.Builder setInterfaces(List<OFBsnInterface> interfaces) {
this.interfaces = interfaces;
this.interfacesSet = true;
return this;
}
@Override
public OFBsnGetInterfacesReply build() {
long xid = this.xidSet ? this.xid : parentMessage.xid;
List<OFBsnInterface> interfaces = this.interfacesSet ? this.interfaces : parentMessage.interfaces;
if(interfaces == null)
throw new NullPointerException("Property interfaces must not be null");
//
return new OFBsnGetInterfacesReplyVer14(
xid,
interfaces
);
}
}
static class Builder implements OFBsnGetInterfacesReply.Builder {
// OF message fields
private boolean xidSet;
private long xid;
private boolean interfacesSet;
private List<OFBsnInterface> interfaces;
@Override
public OFVersion getVersion() {
return OFVersion.OF_14;
}
@Override
public OFType getType() {
return OFType.EXPERIMENTER;
}
@Override
public long getXid() {
return xid;
}
@Override
public OFBsnGetInterfacesReply.Builder setXid(long xid) {
this.xid = xid;
this.xidSet = true;
return this;
}
@Override
public long getExperimenter() {
return 0x5c16c7L;
}
@Override
public long getSubtype() {
return 0xaL;
}
@Override
public List<OFBsnInterface> getInterfaces() {
return interfaces;
}
@Override
public OFBsnGetInterfacesReply.Builder setInterfaces(List<OFBsnInterface> interfaces) {
this.interfaces = interfaces;
this.interfacesSet = true;
return this;
}
//
@Override
public OFBsnGetInterfacesReply build() {
long xid = this.xidSet ? this.xid : DEFAULT_XID;
List<OFBsnInterface> interfaces = this.interfacesSet ? this.interfaces : DEFAULT_INTERFACES;
if(interfaces == null)
throw new NullPointerException("Property interfaces must not be null");
return new OFBsnGetInterfacesReplyVer14(
xid,
interfaces
);
}
}
final static Reader READER = new Reader();
static class Reader implements OFMessageReader<OFBsnGetInterfacesReply> {
@Override
public OFBsnGetInterfacesReply readFrom(ByteBuf bb) throws OFParseError {
int start = bb.readerIndex();
// fixed value property version == 5
byte version = bb.readByte();
if(version != (byte) 0x5)
throw new OFParseError("Wrong version: Expected=OFVersion.OF_14(5), got="+version);
// fixed value property type == 4
byte type = bb.readByte();
if(type != (byte) 0x4)
throw new OFParseError("Wrong type: Expected=OFType.EXPERIMENTER(4), got="+type);
int length = U16.f(bb.readShort());
if(length < MINIMUM_LENGTH)
throw new OFParseError("Wrong length: Expected to be >= " + MINIMUM_LENGTH + ", was: " + length);
if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
// Buffer does not have all data yet
bb.readerIndex(start);
return null;
}
if(logger.isTraceEnabled())
logger.trace("readFrom - length={}", length);
long xid = U32.f(bb.readInt());
// fixed value property experimenter == 0x5c16c7L
int experimenter = bb.readInt();
if(experimenter != 0x5c16c7)
throw new OFParseError("Wrong experimenter: Expected=0x5c16c7L(0x5c16c7L), got="+experimenter);
// fixed value property subtype == 0xaL
int subtype = bb.readInt();
if(subtype != 0xa)
throw new OFParseError("Wrong subtype: Expected=0xaL(0xaL), got="+subtype);
List<OFBsnInterface> interfaces = ChannelUtils.readList(bb, length - (bb.readerIndex() - start), OFBsnInterfaceVer14.READER);
OFBsnGetInterfacesReplyVer14 bsnGetInterfacesReplyVer14 = new OFBsnGetInterfacesReplyVer14(
xid,
interfaces
);
if(logger.isTraceEnabled())
logger.trace("readFrom - read={}", bsnGetInterfacesReplyVer14);
return bsnGetInterfacesReplyVer14;
}
}
public void putTo(PrimitiveSink sink) {
FUNNEL.funnel(this, sink);
}
final static OFBsnGetInterfacesReplyVer14Funnel FUNNEL = new OFBsnGetInterfacesReplyVer14Funnel();
static class OFBsnGetInterfacesReplyVer14Funnel implements Funnel<OFBsnGetInterfacesReplyVer14> {
private static final long serialVersionUID = 1L;
@Override
public void funnel(OFBsnGetInterfacesReplyVer14 message, PrimitiveSink sink) {
// fixed value property version = 5
sink.putByte((byte) 0x5);
// fixed value property type = 4
sink.putByte((byte) 0x4);
// FIXME: skip funnel of length
sink.putLong(message.xid);
// fixed value property experimenter = 0x5c16c7L
sink.putInt(0x5c16c7);
// fixed value property subtype = 0xaL
sink.putInt(0xa);
FunnelUtils.putList(message.interfaces, sink);
}
}
public void writeTo(ByteBuf bb) {
WRITER.write(bb, this);
}
final static Writer WRITER = new Writer();
/**
 * Wire-format serializer for OFBsnGetInterfacesReplyVer14. Writes the fixed
 * header bytes, a 16-bit length placeholder, the variable fields, then
 * backpatches the placeholder with the actual message length. Field order is
 * fixed by the OpenFlow 1.4 BSN extension layout and must not be changed.
 */
static class Writer implements OFMessageWriter<OFBsnGetInterfacesReplyVer14> {
    @Override
    public void write(ByteBuf bb, OFBsnGetInterfacesReplyVer14 message) {
        int startIndex = bb.writerIndex();
        // fixed value property version = 5
        bb.writeByte((byte) 0x5);
        // fixed value property type = 4
        bb.writeByte((byte) 0x4);
        // length is length of variable message, will be updated at the end
        int lengthIndex = bb.writerIndex();
        bb.writeShort(U16.t(0));
        bb.writeInt(U32.t(message.xid));
        // fixed value property experimenter = 0x5c16c7L
        bb.writeInt(0x5c16c7);
        // fixed value property subtype = 0xaL
        bb.writeInt(0xa);
        // variable-length interface list
        ChannelUtils.writeList(bb, message.interfaces);
        // update length field: total bytes written for this message
        int length = bb.writerIndex() - startIndex;
        bb.setShort(lengthIndex, length);
    }
}
/** Renders the message as {@code OFBsnGetInterfacesReplyVer14(xid=..., interfaces=...)}. */
@Override
public String toString() {
    return "OFBsnGetInterfacesReplyVer14("
            + "xid=" + xid
            + ", "
            + "interfaces=" + interfaces
            + ")";
}
/**
 * Value equality: two messages are equal iff they are the exact same class
 * (not merely assignment-compatible) and have equal xid and interfaces.
 */
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }
    // getClass comparison (not instanceof) keeps equality symmetric across subclasses.
    if (obj == null || getClass() != obj.getClass()) {
        return false;
    }
    OFBsnGetInterfacesReplyVer14 that = (OFBsnGetInterfacesReplyVer14) obj;
    if (xid != that.xid) {
        return false;
    }
    // Null-safe comparison of the interface list.
    return (interfaces == null) ? (that.interfaces == null) : interfaces.equals(that.interfaces);
}
/**
 * Hash code over the same fields used by {@link #equals(Object)} (xid and
 * interfaces), following the standard prime-accumulate recipe.
 *
 * Fix: the original wrote {@code result = prime * (int)(xid ^ (xid >>> 32));},
 * dropping the {@code result +} term of the standard recipe and discarding the
 * running seed. Still contract-legal, but it weakens distribution and deviates
 * from the accumulate pattern used for the interfaces term; restored here.
 */
@Override
public int hashCode() {
    final int prime = 31;
    int result = 1;
    // fold the 64-bit xid into 32 bits before accumulating
    result = prime * result + (int) (xid ^ (xid >>> 32));
    result = prime * result + ((interfaces == null) ? 0 : interfaces.hashCode());
    return result;
}
}
| apache-2.0 |
jpw-erigo/cloudturbine | JavaCode/CTweb/src/main/java/ctweb/CTweb.java | 59426 | /*
Copyright 2018 Cycronix
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
//---------------------------------------------------------------------------------
// CTweb: Web HTTP interface to CTreader
// Matt Miller, Cycronix
// 11/01/2016 revert to Jetty
// 07/16/2014 converted to NanoHTTPD
// 04/04/2016 updated to NanoHTTPD V2.2
// 02/18/2014 initial version using Jetty
/*
URL Syntax:
http://cloudturbine.net:/CT/Source/Channel?key1=value&key2=value
where key-value pairs:
key value examples description
t time 123456789 time relative to tref (sec)
r tref absolute,newest,oldest time reference
d duration 100 time interval (sec)
dt datatype s format as string (s) default, binary (b), HTML (H)
f timefetch t,d f=t to fetch time-only (default time+data)
*/
package ctweb;
import java.awt.Graphics2D;
import java.awt.RenderingHints;
import java.awt.Transparency;
import java.awt.image.BufferedImage;
import java.io.BufferedOutputStream;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Properties;
import javax.imageio.ImageIO;
//import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletInputStream;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
//import org.eclipse.jetty.client.api.Request;
import org.eclipse.jetty.http.HttpVersion;
import org.eclipse.jetty.security.ConstraintMapping;
import org.eclipse.jetty.security.ConstraintSecurityHandler;
import org.eclipse.jetty.security.HashLoginService;
import org.eclipse.jetty.security.authentication.BasicAuthenticator;
import org.eclipse.jetty.server.HttpConfiguration;
import org.eclipse.jetty.server.HttpConnectionFactory;
import org.eclipse.jetty.server.SecureRequestCustomizer;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.ServerConnector;
import org.eclipse.jetty.server.SslConnectionFactory;
import org.eclipse.jetty.server.handler.gzip.GzipHandler;
import org.eclipse.jetty.servlet.ServletHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.eclipse.jetty.util.security.Constraint;
import org.eclipse.jetty.util.ssl.SslContextFactory;
import org.eclipse.jetty.server.Connector;
import cycronix.ctlib.CTdata;
import cycronix.ctlib.CTinfo;
import cycronix.ctlib.CTreader;
import cycronix.ctlib.CTwriter;
//---------------------------------------------------------------------------------
public class CTweb {
// URL path roots for CloudTurbine and RBNB-compatible servlet requests.
private static final String servletRoot = "/CT";
private static final String rbnbRoot = "/RBNB";
// private static String rootFolder="CTdata";
private static String rootFolder=null; // data root folder; null until resolved (for compat with CT2DB)
private static CTreader ctreader=null; // reader over rootFolder, created in main()
public static boolean debug=false; // local debug logging (-x flag)
public static boolean Debug=false; // debug local plus CT library (-X flag)
private static boolean swapFlag = false; // byte-swap flag passed to CTdata (-r flag)
// private static String resourceBase = "CTweb";
private static String resourceBase = null; // web resource folder; null => search for resource base
private static String sourceFolder = null; // optional single-source restriction (-s flag)
private static int MaxDat = 10000000; // max number data elements to return (was 65536)
private static long queryCount=0; // running count of servlet requests (diagnostics)
private static String keyStoreFile="ctweb.jks"; // HTTPS keystore file path
private static String keyStorePW="ctweb.pw"; // keystore PW
private static String realmProps=null; // authentication realm user/password info
private static int port = 8000; // default HTTP port
private static int sslport = 8443; // HTTPS port (0 means none)
private static String password=null; // CTcrypto decrypt password (-e flag)
private static int scaleImage=1; // reduce image size by factor
private static boolean fastSearch=false; // fast channel search, reduces startup time
private static String CTwebPropsFile=null; // redirect to other CTweb (-R routing file)
private static Properties CTwebProps=null; // proxy server Name=Server properties
private static boolean preCache = false; // pre-build index cache (-C flag)
// private static ArrayList<String> CTwriters = new ArrayList<String>(); // list of CTwriters (one per source)
private static HashMap<String, CTwriter> CTwriters = new HashMap<String,CTwriter>(); // list of CTwriters (one per source)
// NOTE(review): hmap appears unused in this view and is the only non-static field — confirm before removing.
HashMap<Integer, String> hmap = new HashMap<Integer, String>();
private static int maxCTwriters = 0; // crude way to limit out of control PUTs
private static double keepTime=0., lastTime=0.; // CTwriter.dotrim() keep-duration for PUTs
private static CTwriter ctwriter = null; // CTwriter for dotrim
private static String lastResponseString=null; // track cache-response logic (wildcard request caching)
private static String lastRequest =null; // full URL of last cached wildcard request
// private static String lastHeaderFolders = null;
private static long lastRequestTime = 0; // millis timestamp of last cached wildcard request
// private static HttpServletResponse lastResponse = null;
//---------------------------------------------------------------------------------
/**
 * CTweb entry point: parses command-line flags, resolves the data root and web
 * resource folders, creates the CTreader (with optional decryption and
 * pre-caching), then starts the embedded Jetty HTTP/HTTPS server and blocks.
 *
 * @param args command-line flags followed by an optional rootFolder argument
 * @throws Exception on server startup failure (propagated from Jetty)
 */
public static void main(String[] args) throws Exception {
    // Print usage when run with no args, or with -h (help).
    // Fix: the original nested the -h check inside the args.length==0 branch,
    // where args.length>0 could never be true, so -h never printed help or exited.
    if(args.length == 0 || args[0].equals("-h")) {
        System.err.println("CTweb -r -x -X -F -W -p <port> -P <sslport> -f <webfolder> -s <sourceFolder> -k <keystoreFile> -K <keystorePW> -a <authenticationFile> -S <scaleImage> -R <routingFile> rootFolder");
        if(args.length > 0 && args[0].equals("-h")) System.exit(0); // print help and exit
    }

    // arg parsing: leading dash-flags, then optional rootFolder positional arg
    int dirArg = 0;
    while((dirArg<args.length) && args[dirArg].startsWith("-")) {
        if(args[dirArg].equals("-r")) swapFlag = true;
        if(args[dirArg].equals("-x")) debug = true;
        if(args[dirArg].equals("-X")) Debug=true;
        if(args[dirArg].equals("-C")) preCache=true;
        if(args[dirArg].equals("-F")) fastSearch = !fastSearch;
        if(args[dirArg].equals("-p")) port = Integer.parseInt(args[++dirArg]);
        if(args[dirArg].equals("-P")) sslport = Integer.parseInt(args[++dirArg]);
        if(args[dirArg].equals("-f")) resourceBase = args[++dirArg];
        if(args[dirArg].equals("-s")) sourceFolder = args[++dirArg];
        if(args[dirArg].equals("-k")) keyStoreFile = args[++dirArg];
        if(args[dirArg].equals("-K")) keyStorePW = args[++dirArg];
        if(args[dirArg].equals("-a")) realmProps = args[++dirArg];
        if(args[dirArg].equals("-S")) scaleImage = Integer.parseInt(args[++dirArg]);
        if(args[dirArg].equals("-e")) password = args[++dirArg];
        if(args[dirArg].equals("-R")) CTwebPropsFile = args[++dirArg];
        if(args[dirArg].equals("-W")) maxCTwriters = Integer.parseInt(args[++dirArg]);
        if(args[dirArg].equals("-w")) keepTime = Double.parseDouble(args[++dirArg]);
        dirArg++;
    }
    if(args.length > dirArg) rootFolder = args[dirArg++];

    // load proxy properties (child-route table) if a routing file was given
    if(CTwebPropsFile != null) loadProps();

    // If sourceFolder has been specified, make sure it exists and is a directory
    if ( (sourceFolder != null) && ( (new File(sourceFolder).exists() == false) || (new File(sourceFolder).isDirectory() == false) ) ) {
        System.err.println("The source folder doesn't exist or isn't a directory.");
        System.exit(0);
    }

    // resolve rootFolder: derive from sourceFolder, fall back to "CTdata", or verify the given path
    if(rootFolder == null && sourceFolder != null) { // source is full path; split into parent/name
        rootFolder = new File(sourceFolder).getParent();
        sourceFolder = new File(sourceFolder).getName();
    }
    else if(rootFolder == null) { // check for a couple defaults
        if (new File("CTdata").exists()) rootFolder = "CTdata";
        else {
            System.err.println("Cannot find default data folder. Please specify.");
            System.exit(0);
        }
    }
    else {
        if(!(new File(rootFolder).exists())) {
            System.err.println("Cannot find specified data folder: "+rootFolder);
            System.exit(0);
        }
    }

    // resolve resourceBase: local "CTweb" folder, else hosted webscan fallback
    if(resourceBase==null) {
        if(new File("CTweb").exists()) resourceBase = "CTweb";
        else resourceBase = "http://webscan.cycronix.com";
    }

    // create CT reader (optionally decrypting, optionally pre-building the index cache)
    ctreader = new CTreader(rootFolder);
    if(password!=null) ctreader.setPassword(password, true); // optional decrypt
    CTinfo.setDebug(Debug);
    if(preCache) ctreader.preCache();

    // setup and start Jetty HTTP server; join() blocks until shutdown
    Server server = setupHTTP();
    server.start();
    server.join();
}
//---------------------------------------------------------------------------------
// setup HTTP/S Jetty
/**
 * Builds the embedded Jetty server: an HTTP connector on {@code port}, an
 * optional HTTPS connector on {@code sslport} (when &gt; 0 and the keystore
 * file exists), the CTServlet mapped to all paths, optional authentication,
 * and a gzip handler wrapping the whole chain. The wiring order below is
 * significant (connectors before handlers, authentication before gzip).
 *
 * @return configured (but not yet started) Jetty Server
 * @throws FileNotFoundException declared for callers; not thrown in this body
 */
private static Server setupHTTP() throws FileNotFoundException {
    // Create a basic jetty server object without declaring the port.
    Server server = new Server();
    // HTTP Configuration
    HttpConfiguration http_config = new HttpConfiguration();
    if(sslport>0) {
        http_config.setSecureScheme("https");
        http_config.setSecurePort(sslport);
        http_config.addCustomizer(new SecureRequestCustomizer(false,0L,false)); // disable HSTS, allow HTTP and HTTPS both
    }
    http_config.setOutputBufferSize(32768);
    // HTTP connector
    ServerConnector http = new ServerConnector(server,
            new HttpConnectionFactory(http_config));
    http.setPort(port);
    http.setIdleTimeout(30000);
    if(sslport>0) { // setup HTTPS
        File ksFile = new File(keyStoreFile);
        if (ksFile.exists()) {
            // SSL Context Factory for HTTPS
            SslContextFactory sslContextFactory = new SslContextFactory();
            // sslContextFactory.setExcludeCipherSuites("^.*_(MD5|SHA|SHA1)$"); // enable old ciphers?
            sslContextFactory.setKeyStorePath(ksFile.getAbsolutePath());
            sslContextFactory.setKeyStorePassword(keyStorePW);
            // sslContextFactory.setKeyManagerPassword(keypw);
            // HTTPS Configuration: clone the HTTP config and add a
            // SecureRequestCustomizer so the connector can resolve the https
            // connection before handing control over to the Jetty Server.
            HttpConfiguration https_config = new HttpConfiguration(http_config);
            SecureRequestCustomizer src = new SecureRequestCustomizer();
            src.setStsMaxAge(2000);
            src.setStsIncludeSubDomains(true);
            https_config.addCustomizer(src);
            // HTTPS connector: second ServerConnector with the ssl context
            // factory, its own port, and a longer idle timeout.
            ServerConnector https = new ServerConnector(server,
                new SslConnectionFactory(sslContextFactory,HttpVersion.HTTP_1_1.asString()),
                    new HttpConnectionFactory(https_config));
            https.setPort(sslport);
            https.setIdleTimeout(500000);
            // Register both connectors: requests can flow in from both http
            // and https urls to their respective ports.
            server.setConnectors(new Connector[] { http, https });
        }
        else {
            System.err.println("Keystore file ("+keyStoreFile+") not found; HTTPS disabled.");
            sslport = 0;
            server.setConnectors(new Connector[] { http });
        }
    }
    else server.setConnectors(new Connector[] { http });
    // Set a handler: single async-capable CTServlet mapped to every path
    ServletHandler shandler = new ServletHandler();
    ServletHolder sholder;
    sholder = new ServletHolder(new CTServlet());
    sholder.setAsyncSupported(true); // need fewer threads if non-blocking?
    sholder.setInitParameter("maxThreads", "100"); // how many is good?
    shandler.addServletWithMapping(sholder, "/*");
    setupAuthentication(server,shandler); // set handler with optional authentication
    // gzip-compress responses above a minimum size, wrapping the handler chain
    GzipHandler gzipHandler = new GzipHandler();
    gzipHandler.setMinGzipSize(1024);
    // gzipHandler.setCompressionLevel(5);
    gzipHandler.setHandler(server.getHandler());
    server.setHandler(gzipHandler);
    String msg;
    if(sslport > 0) msg = ", HTTP port: "+port+", HTTPS port: "+sslport;
    else msg = ", HTTP port: "+port;
    System.out.println("Server started. webFolder: "+resourceBase+", dataFolder: "+rootFolder+msg+"\n");
    return server;
}
//---------------------------------------------------------------------------------
// setup authentication.
// This method is based on the "Secured Hello Handler" example found at
// https://www.eclipse.org/jetty/documentation/9.4.x/embedded-examples.html
// Copyright notice follows:
// ========================================================================
// Copyright (c) 1995-2018 Mort Bay Consulting Pty. Ltd.
// ------------------------------------------------------------------------
// All rights reserved. This program and the accompanying materials
// are made available under the terms of the Eclipse Public License v1.0
// and Apache License v2.0 which accompanies this distribution.
//
// The Eclipse Public License is available at
// http://www.eclipse.org/legal/epl-v10.html
//
// The Apache License v2.0 is available at
// http://www.opensource.org/licenses/apache2.0.php
//
// You may elect to redistribute this code under either of these licenses.
// ========================================================================
//
/**
 * Installs HTTP Basic authentication in front of {@code shandler} when a realm
 * properties file was supplied; otherwise installs the servlet handler directly.
 * Any authenticated role ("**") is accepted for every URL.
 *
 * @param server   Jetty server to attach the handler chain to
 * @param shandler servlet handler to protect (or install bare)
 */
private static void setupAuthentication(Server server, ServletHandler shandler) {
    // No realm file given: serve without authentication.
    if (realmProps == null) {
        server.setHandler(shandler);
        return;
    }

    // Hashmap-based user/password store backed by the realm properties file.
    HashLoginService loginService = new HashLoginService("CTrealm", realmProps);
    server.addBean(loginService);

    // Security handler matches URL patterns against authentication constraints.
    ConstraintSecurityHandler securityHandler = new ConstraintSecurityHandler();
    server.setHandler(securityHandler);

    // Require an authenticated user in any role.
    Constraint authConstraint = new Constraint();
    authConstraint.setName("auth");
    authConstraint.setAuthenticate(true);
    authConstraint.setRoles(new String[] { "**" }); // any role

    // Apply that constraint to every URL.
    ConstraintMapping cmap = new ConstraintMapping();
    cmap.setPathSpec("/*");
    cmap.setConstraint(authConstraint);

    // Basic-auth credential check against the login service, then chain the
    // servlet handler behind the security handler.
    securityHandler.setConstraintMappings(Collections.singletonList(cmap));
    securityHandler.setAuthenticator(new BasicAuthenticator());
    securityHandler.setLoginService(loginService);
    securityHandler.setHandler(shandler);
}
//---------------------------------------------------------------------------------
// callback for http requests
@SuppressWarnings("serial")
public static class CTServlet extends HttpServlet {
/** Answers CORS preflight (OPTIONS) requests with permissive cross-origin headers. */
@Override
protected void doOptions(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
    if (debug) {
        System.err.println("doOptions, request: " + request.getPathInfo() + ", queryCount: " + queryCount + ", request.method: " + request.getMethod());
    }
    // CORS enable: any origin, the supported methods, and the If-None-Match header.
    String[][] corsHeaders = {
            { "Access-Control-Allow-Origin", "*" },
            { "Access-Control-Allow-Methods", "GET, POST, HEAD, OPTIONS" },
            { "Access-Control-Allow-Headers", "If-None-Match" },
    };
    for (String[] h : corsHeaders) {
        response.addHeader(h[0], h[1]);
    }
}
@Override
protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
long startTime = System.nanoTime();
boolean doProfile=debug;
if(debug) {
String uri = request.getScheme() + "://" +
request.getServerName() +
("http".equals(request.getScheme()) && request.getServerPort() == 80 || "https".equals(request.getScheme()) && request.getServerPort() == 443 ? "" : ":" + request.getServerPort() ) +
request.getRequestURI() +
(request.getQueryString() != null ? "?" + request.getQueryString() : "");
System.err.println("doGet, URI: "+uri+", queryCount: "+queryCount+", request.method: "+request.getMethod());
}
String requestURI = request.getRequestURI();
//--------------------------------
// redirect if Proxy server
// local sources have priority; only redirect if proxy-source is unique
ArrayList<String> sourceList = new ArrayList<String>();
// boolean isRedirect = request.getParameter("redirect")!=null;
// System.err.println("isRedirect: "+isRedirect+", qs: "+request.getQueryString());
// if(CTwebProps != null && !isRedirect) { // no recursive redirects...
if(CTwebProps != null) {
if(sourceFolder == null) sourceList = ctreader.listSources();
else sourceList.add(sourceFolder);
Enumeration CTnames = CTwebProps.propertyNames();
while(CTnames.hasMoreElements()) {
String CTname = (String)CTnames.nextElement();
if(sourceList.contains(CTname)) { // local sources take priority over routes
System.err.println("routed path ignored (duplicate source): "+CTname);
CTwebProps.remove(CTname);
continue;
}
requestURI = requestURI.replace("//", "/"); // replace any double with single slash
if(requestURI.startsWith(servletRoot+"/"+CTname) || requestURI.startsWith(rbnbRoot+"/"+CTname)) {
// String redirectRequest = requestURI.replace("/"+CTname, ""); // drop CTname in redirect request
String uri = request.getScheme() + "://" + CTwebProps.getProperty(CTname) +
requestURI +
// redirectRequest +
// (request.getQueryString() != null ? "?" + request.getQueryString() + "&redirect=true": "?redirect=true");
(request.getQueryString() != null ? "?" + request.getQueryString() : "");
if(debug) System.err.println("redirect URI: "+requestURI+" to: "+uri);
response.sendRedirect(uri);
return;
}
}
}
//--------------------------------
// String servletPath = request.getServletPath();
String pathInfo = request.getPathInfo();
queryCount++;
StringBuilder sbresp = new StringBuilder(8192); // estimate initial size
// server resource files
if(!pathInfo.startsWith(servletRoot) && !pathInfo.startsWith(rbnbRoot)) {
try {
// System.err.println("pathInfo: <"+pathInfo+">, CTwebProps: "+CTwebProps);
// proxy-mode register child route:
if(pathInfo.startsWith("/addroute")) {
if(CTwebProps == null) CTwebProps = new Properties();
String[] routeinfo = request.getQueryString().split("=");
String src=null, addr=null;
if(routeinfo.length==1) {
src = routeinfo[0];
addr = request.getRemoteAddr() + ":8000";
}
else if(routeinfo.length == 2) {
src = routeinfo[0];
addr = routeinfo[1];
}
else return;
System.err.println("addroute, src: "+src+", addr: "+addr);
CTwebProps.put(src, addr);
response.getWriter().println("addroute, src: "+src+", addr: "+addr);
return;
}
if(pathInfo.startsWith("/listroutes")) {
if(CTwebProps == null) {
response.getWriter().println("listroutes: <None>");
return;
}
Enumeration CTnames = CTwebProps.propertyNames();
response.getWriter().println("routes:");
while(CTnames.hasMoreElements()) {
String key = (String)CTnames.nextElement();
response.getWriter().println(key+" = "+CTwebProps.getProperty(key, "<none>"));
}
return;
}
// system clock utility
if(pathInfo.equals("/sysclock")) {
response.setContentType("text/plain");
response.getWriter().println(""+System.currentTimeMillis());
return;
}
InputStream in;
OutputStream out;
response.setContentType(mimeType(pathInfo, "text/html"));
if(resourceBase.startsWith("http")) {
if(pathInfo.equals("/")) pathInfo = "/webscan.htm";
in = new URL(resourceBase + pathInfo).openStream();
out = response.getOutputStream();
}
else {
if(pathInfo.equals("/")) {
if(new File(resourceBase+"/index.htm").exists()) pathInfo = "/index.htm";
else if(new File(resourceBase+"/index.html").exists()) pathInfo = "/index.html";
else pathInfo = "/webscan.htm";
}
out = response.getOutputStream();
in = new FileInputStream(resourceBase+pathInfo); // limit to resourceBase folder
}
// read/write response
byte[] buffer = new byte[4096];
int length;
while ((length = in.read(buffer)) > 0){
out.write(buffer, 0, length);
}
in.close();
out.flush();
}
catch(Exception e) {
System.err.println("Exception on welcome file read, pathInfo: "+resourceBase+pathInfo+", Exception: "+e);
formResponse(response, null); // add CORS header even for error response
response.sendError(HttpServletResponse.SC_NOT_FOUND);
}
return;
}
if(doProfile) System.err.println("doGet 1 time: "+((System.nanoTime()-startTime)/1000000.)+" ms, Memory Used MB: " + (double) (Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()) / (1024*1024));
pathInfo = pathInfo.replace("//", "/"); // merge any empty double-slash layers
String pathParts[] = pathInfo.split("/"); // split into 2 parts: cmd/multi-part-path
try {
double duration=0., start=0.;
String reference="newest";
String param; char ftype='s'; char fetch = 'b';
long cacheDur = 100; // default 100ms (for wildcard requests only)
param = request.getParameter("d"); if(param != null) duration = Double.parseDouble(param);
param = request.getParameter("t"); if(param != null) { start = Double.parseDouble(param); reference="absolute"; }
param = request.getParameter("r"); if(param != null) reference = param;
param = request.getParameter("f"); if(param != null) fetch = param.charAt(0);
param = request.getParameter("dt"); if(param != null) ftype = param.charAt(0);
param = request.getParameter("c"); if(param != null) cacheDur = Long.parseLong(param);
if(reference.equals("refresh")) {
ctreader.clearFileListCache();
formResponse(response,null);
return;
}
if(pathInfo.equals(servletRoot+"/") || pathInfo.equals(rbnbRoot+"/")) pathInfo = servletRoot; // strip trailing slash
if(pathInfo.equals(servletRoot) || pathInfo.equals(rbnbRoot)) { // Root level request for Sources
if(debug) System.err.println("source request: "+pathInfo);
printHeader(sbresp,pathInfo,"/");
// ArrayList<String> sourceList = new ArrayList<String>();
// if(CTwebProps == null || isRedirect) { // no proxy: show child routes only
if(sourceList.size() == 0) { // may have been filled in proxy-check above
if(sourceFolder == null) sourceList = ctreader.listSources();
else sourceList.add(sourceFolder);
}
// proxy server sources:
if(CTwebProps != null) {
Enumeration CTnames = CTwebProps.propertyNames();
while(CTnames.hasMoreElements()) {
String CTname = (String)CTnames.nextElement();
if(!sourceList.contains(CTname)) sourceList.add(CTname); // no dupes
}
}
if(sourceList==null || sourceList.size()==0) sbresp.append("No Sources!");
else {
Collections.sort(sourceList, new Comparator<String>() { // sort source list
@Override public int compare(String s1, String s2) { return s1.compareToIgnoreCase(s2); }
});
for(String sname : sourceList) {
sname = sname.replace("\\", "/"); // backslash not legal URL link
if(debug) System.err.println("src: "+sname);
// if(debug) System.err.println("src: "+sname+", sourceDiskSize: "+ (CTinfo.diskUsage(rootFolder+File.separator+sname,4096)/1024)+"K");
sbresp.append("<li><a href=\""+(pathInfo+"/"+sname)+"/\">"+sname+"/</a><br>");
}
}
formResponse(response, sbresp);
return;
}
else if(pathInfo.contains("/*")) { // wildcard source: /SourceBase/*/channel
// check if cache response works:
String fullrequest = request.getRequestURL()+"?"+request.getQueryString();
long thisTime = System.currentTimeMillis();
if ( ((thisTime - lastRequestTime) < cacheDur) && fullrequest.equals(lastRequest) /* && lastResponse!=null */) {
if(debug) System.err.println("Cached response for request: "+fullrequest);
// response.addHeader("folders", lastHeaderFolders); // Note: we are losing other header fields in cached response...
// TODO: send full cached response header.
response.getWriter().println(lastResponseString);
// response = lastResponse; // lastResponse is not deep clone of prior response.
// System.err.println("Cached response for request: "+fullrequest+", header.folder: "+response.getHeader("folders")+", lhf: "+lastHeaderFolders);
return;
}
// lastRequest = fullrequest;
lastRequestTime = thisTime;
// no cache, fetch CT data:
String[] sbaseParts = pathInfo.split("\\*");
String cbase = null;
if(sbaseParts.length > 1) cbase = pathParts[pathParts.length-1];
sbaseParts = sbaseParts[0].split("/");
String sbase = null;
// String sbase = sbaseParts[2];
if(sbaseParts.length > 2) {
sbase = sbaseParts[2];
for(int i=3; i<sbaseParts.length; i++) {
if(sbaseParts[i].equals("*")) break;
// sbase += ("/"+sbaseParts[i]);
sbase += (File.separator+sbaseParts[i]); // for Windows MJM 8/22/18
}
}
if(debug) System.err.println("got wildcard source, pathInfo: "+pathInfo+", sbase: "+sbase+", cbase: "+cbase);
// sourceList = ctreader.listSources();
File listFile[] = new File(rootFolder+File.separator+sbase).listFiles(); // quick and dirty folder list
// simply append all matching sources/chans as single response:
double oldTime=0, newTime=0, lagTime=0, sTime=0, eTime=0;
// for(String sname : sourceList) {
if(listFile != null) { // no-file check
// String folders = "";
for(File f:listFile) {
String sname = f.getName();
if(sname.startsWith(".")) continue; // skip hidden files
sname = sbase + File.separator + sname;
if(true) {
// if(sbase==null || sname.startsWith(sbase)) { // get it
ArrayList<String> clist = new ArrayList<String>();
if(cbase != null) clist.add(cbase);
else clist = ctreader.listChans(rootFolder+File.separator+sname,fastSearch);
for(String chan : clist) {
if(cbase==null || chan.equals(cbase)) {
CTdata tdata = ctreader.getData(sname,chan,start,duration,reference);
if(tdata != null && tdata.size()>0) { // MJM 8/1/18, 9/17/18
String[] dlist = tdata.getDataAsString(CTinfo.fileType(chan,'s'));
if(dlist != null) {
for(String d : dlist) sbresp.append(d+"\n");
}
// gather header info:
double[] tlimits = ctreader.timeLimits(sname, chan);
if(oldTime > tlimits[0]) oldTime = tlimits[0];
if(newTime==0 || newTime < tlimits[1]) newTime = tlimits[1];
lagTime = ((double)System.currentTimeMillis()/1000.) - newTime;
double time[] = tdata.getTime();
if(sTime==0 || time[0] > sTime) sTime = time[0]; // freshest
if(eTime==0 || time[time.length-1] > eTime) eTime = time[time.length-1];
// add file-list to header:
// System.err.println("Folder: "+f.getName());
// if(folders.length() > 0) folders += ",";
// folders += f.getName();
}
}
}
}
}
// response.addHeader("folders", folders); // custom wildcard '*' info
// lastHeaderFolders = folders;
}
if(sbresp.length() == 0) { // MJM 9/17/18: return SC_NOT_FOUND if no match
formResponse(response, null); // add CORS header even for error response
response.sendError(HttpServletResponse.SC_NOT_FOUND);
return;
}
formHeader(response, sTime, eTime, oldTime, newTime, lagTime);
formResponse(response, sbresp);
synchronized(this) { // make sure cache response matches corresponding fullRequest
// lastResponse = response; // this doesn't actually keep a copy of internal fields...
lastResponseString = sbresp.toString();
lastRequest = fullrequest;
// System.err.println("response.headerfolder: "+response.getHeader("folders")+", lrhf: "+lastResponse.getHeader("folders"));
}
return;
}
else if(pathInfo.endsWith("/")) { // Source level request for Channels
if(debug) System.err.println("channel request: "+pathInfo);
if(pathParts.length < 3) {
formResponse(response, null); // add CORS header even for error response
if(debug) System.err.println("warning, pathparts.length<3: "+pathParts.length);
response.sendError(HttpServletResponse.SC_NOT_FOUND);
return;
}
String sname = pathParts[2];
for(int i=3; i<pathParts.length; i++) sname += ("/"+pathParts[i]); // multi-level source name
if(sname.endsWith("/")) sname = sname.substring(0,sname.length()-2);
if(debug) System.err.println("CTweb listChans for source: "+(rootFolder+File.separator+sname));
ArrayList<String> clist = ctreader.listChans(rootFolder+File.separator+sname,fastSearch);
if(clist == null) sbresp.append("<NULL>");
else {
if(ftype == 'H') { // all chans in HTML table format
double time[] = null;
ArrayList<String[]> chanlist = new ArrayList<String[]>();
sbresp.append("<table id="+sname+">\n");
sbresp.append("<tr><th>Time</th>");
for(String chan : clist) {
sbresp.append("<th>"+chan+"</th>");
CTdata tdata = ctreader.getData(sname,chan,start,duration,reference);
if(time == null) time = tdata.getTime(); // presume all times follow first chan
chanlist.add(tdata.getDataAsString(CTinfo.fileType(chan,'s')));
}
sbresp.append("</tr>\n");
for(int i=0; i<time.length; i++) {
sbresp.append("<tr>");
sbresp.append("<td>"+(time[i]/86400.+25569.)+"</td>"); // spreadsheet time (epoch 1900)
for(int j=0; j<chanlist.size(); j++) {
String c[] = chanlist.get(j); // possibly unequal data sizes
if(i < c.length) sbresp.append("<td>"+c[i]+"</td>");
}
sbresp.append("</tr>\n");
}
sbresp.append("</table>");
}
else {
printHeader(sbresp,pathInfo,"/"+pathParts[1]);
for(String cname : clist) {
if(debug) System.err.println(sname+"/chan: "+cname);
sbresp.append("<li><a href=\""+cname+"\">"+cname+"</a><br>");
}
}
}
formResponse(response, sbresp);
return;
}
else { // Channel level request for data
if(debug) System.err.println("data request: "+pathInfo);
String source = pathParts[2];
for(int i=3; i<pathParts.length-1; i++) source += ("/"+pathParts[i]); // multi-level source name
// String chan = pathParts[3]; // presumes /CT/Source/Chan with no sub-level nesting
String chan = pathParts[pathParts.length-1];
// String sourcePath = rootFolder+File.separator+source;
String[] strdata=null;
// setTimeOnly partially-implemented
if(fetch == 't') ctreader.setTimeOnly(true); // don't waste time/memory getting data...
else ctreader.setTimeOnly(false);
if(doProfile) System.err.println("doGet <R time: "+((System.nanoTime()-startTime)/1000000.)+" ms, Memory Used MB: " + (double) (Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()) / (1024*1024));
CTdata tdata = ctreader.getData(source,chan,start,duration,reference);
if(doProfile) System.err.println("doGet >R time: "+((System.nanoTime()-startTime)/1000000.)+" ms, Memory Used MB: " + (double) (Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()) / (1024*1024));
if(tdata == null) { // empty response for WebTurbine compatibility
if(debug) System.err.println("No such channel: "+pathInfo+", chan: "+chan+", start: "+start+", duration: "+duration+", refernce: "+reference);
formResponse(response, null); // add CORS header even for error response
response.sendError(HttpServletResponse.SC_NOT_FOUND);
return;
}
else {
tdata.setSwap(swapFlag);
double time[] = tdata.getTime();
if(time == null) {
if(debug) System.err.println("Oops, got data but no time data?: "+pathInfo);
formResponse(response, null); // add CORS header even for error response
response.sendError(HttpServletResponse.SC_NOT_FOUND);
return;
}
int numData = time.length;
if(debug) System.err.println("--------CTweb getData: "+chan+", numData: "+numData+", fetch: "+fetch+", ftype: "+ftype+", pathInfo: "+pathInfo);
// check for If-None-Match and skip duplicates.
// if(duration==0 && fetch=='b' && reference.equals("absolute")) { // only works for single-object requests
if(numData>0) {
String ifnonematch = request.getHeader("If-None-Match");
if(ifnonematch != null) {
String[] matchparts = ifnonematch.split(":");
if(matchparts.length == 2 && matchparts[1].length()>0) {
String matchchan = matchparts[0];
double matchtime = Double.parseDouble(matchparts[1]); // int-msec
// long gottime = (long)(1000.* time[0]); // int-msec for compare
double gottime = 1000.* time[time.length-1]; // int-msec for compare to last (most recent) got-time
String reqchan = source + "/" + chan; // reconstruct full path
if(reqchan.startsWith("/")) reqchan = reqchan.substring(1); // strip leading '/' if present
if(debug) System.err.println("ifnonematch, gottime: "+gottime+", matchtime: "+matchtime+", matchchan: "+matchchan+", reqchan: "+reqchan);
if(doProfile) System.err.println("doGet 2a time: "+((System.nanoTime()-startTime)/1000000.)+" ms");
if(Math.abs(matchtime-gottime)<=1 && matchchan.equals(reqchan)) { // account for msec round off error
// add header info about time limits
// JPW, in next 2 calls, change from sourcePath to source (ie, don't use full path)
// double oldTime = ctreader.oldTime(source,chan);
// double newTime = ctreader.newTime(source,chan);
double[] tlimits = ctreader.timeLimits(source, chan);
double oldTime = tlimits[0];
double newTime = tlimits[1];
// System.err.println("newTime: "+newTime);
if(doProfile) System.err.println("doGet 2b time: "+((System.nanoTime()-startTime)/1000000.)+" ms");
double lagTime = ((double)System.currentTimeMillis()/1000.) - newTime;
formHeader(response, time[0], time[time.length-1], oldTime, newTime, lagTime);
formResponse(response,null);
response.sendError(HttpServletResponse.SC_NOT_MODIFIED);
if(doProfile) System.err.println("doGet 2c time: "+((System.nanoTime()-startTime)/1000000.)+" ms");
if(debug) System.err.println("NOT_MODIFIED: "+matchchan+", reqTime: "+start+", gotTime: "+gottime+", ref: "+reference+", newTime: "+newTime);
return;
}
}
}
}
if(numData > MaxDat && ftype != 'b') { // unlimited binary fetch
System.err.println("CTweb: limiting output points to: "+MaxDat);
numData = MaxDat;
}
// if(time.length == 0) System.err.println("CTweb warning: no data!");
if(numData > 0) {
if(ftype == 's' /* && fetch=='b' */) ftype = CTinfo.fileType(chan,'s'); // over-ride for certain binary types
if(fetch=='t') ftype ='s'; // time-only data returned as string
if(debug) System.err.println("getData: "+chan+"?t="+start+"&d="+duration+"&r="+reference+", ftype: "+ftype);
switch(ftype) {
// binary types returned as byteArrays (no time stamps sent!)
case 'b':
case 'B':
if(debug) System.err.println("binary data response...");
byte[] bdata = tdata.getDataAsByteArray(); // get as single byte array vs chunks
// add header info about time limits
// JPW, in next 2 calls, change from sourcePath to source (ie, don't use full path)
// double oldTime = ctreader.oldTime(source,chan);
// double newTime = ctreader.newTime(source,chan);
double[] tlimits = ctreader.timeLimits(source, chan);
double oldTime = tlimits[0];
double newTime = tlimits[1];
double lagTime = ((double)System.currentTimeMillis()/1000.) - newTime;
// formHeader(response, time[0], time[time.length-1], oldTime, newTime, lagTime);
if(chan.toLowerCase().endsWith(".jpg")) response.setContentType("image/jpeg");
else if(chan.toLowerCase().endsWith(".wav")) response.setContentType("audio/wav");
else response.setContentType("application/octet-stream");
if(bdata == null || bdata.length==0) {
if(debug) System.err.println("No data for request: "+pathInfo);
formHeader(response, 0., 0., oldTime, newTime, lagTime);
formResponse(response, null); // add CORS header even for error response
response.sendError(HttpServletResponse.SC_NOT_FOUND);
return;
}
else {
formHeader(response, time[0], time[time.length-1], oldTime, newTime, lagTime);
formResponse(response,null);
}
// down-size large images
if(chan.endsWith(".jpg") && (scaleImage>1) && bdata.length>100000) {
if(bdata.length<200000 && scaleImage>2) bdata = scale(bdata, 2);
else bdata = scale(bdata, scaleImage);
}
if(bdata.length < 65536) { // unchunked
// System.err.println("b.length: "+bdata.length);
response.setContentLength(bdata.length);
response.getOutputStream().write(bdata);
}
else { // chunked transfer
OutputStream out = response.getOutputStream();
InputStream input = new ByteArrayInputStream(bdata); // only return 1st image?
byte[] buffer = new byte[16384];
int length;
long totRead = 0;
while ((length = input.read(buffer)) > 0){
totRead += length;
out.write(buffer, 0, length);
}
if(debug) System.err.println("chunked transfer for: "+chan+", totalBytes: "+totRead);
input.close();
out.flush();
}
if(doProfile) System.err.println("doGet B time: "+((System.nanoTime()-startTime)/1000000.)+" ms");
if(debug) System.err.println("binary data response, bytes: "+bdata.length);
return;
// HTML table format (for import to spreadsheets)
case 'H':
strdata = tdata.getDataAsString(CTinfo.fileType(chan,'s')); // convert any/all numeric types to string
if(strdata != null) {
sbresp.append("<table id="+source+"/"+chan+">\n");
sbresp.append("<tr><th>Time</th><th>"+source+"/"+chan+"</th></tr>");
for(int i=time.length-numData; i<numData; i++) {
sbresp.append("<tr>");
if(fetch != 'd') sbresp.append("<td>"+(time[i]/86400.+25569.)+"</td>"); // spreadsheet time (epoch 1900)
if(fetch != 't') sbresp.append("<td>"+strdata[i]+"</td>");
sbresp.append("</tr>\n");
}
sbresp.append("</table>");
formResponse(response, sbresp);
if(debug) System.err.println("HTML data response, length: "+sbresp.length());
return;
}
else {
System.err.println("Unrecognized ftype: "+ftype);
formResponse(response, null); // add CORS header even for error response
response.sendError(HttpServletResponse.SC_NOT_FOUND);
return;
}
// all other types returned as rows of time,value strings
default:
if(fetch=='t') {
for(int i=time.length-numData; i<numData; i++) sbresp.append(formatTime(time[i])+"\n"); // most recent
}
else {
strdata = tdata.getDataAsString(ftype); // convert any/all numeric types to string
if(strdata != null) {
if(fetch=='d')
for(int i=time.length-numData; i<numData; i++) sbresp.append(strdata[i]+"\n");
else
for(int i=time.length-numData; i<numData; i++) sbresp.append(formatTime(time[i]) +","+strdata[i]+"\n");
}
else {
System.err.println("Unrecognized ftype: "+ftype);
formResponse(response, null); // add CORS header even for error response
response.sendError(HttpServletResponse.SC_NOT_FOUND);
return;
}
}
// add header info about time limits
// JPW, in next 2 calls, change from sourcePath to source (ie, don't use full path)
// oldTime = ctreader.oldTime(source,chan);
// newTime = ctreader.newTime(source,chan);
tlimits = ctreader.timeLimits(source, chan);
oldTime = tlimits[0];
newTime = tlimits[1];
lagTime = ((double)System.currentTimeMillis()/1000.) - newTime;
formHeader(response, time[0], time[time.length-1], oldTime, newTime, lagTime);
// response.setContentType(mimeType(pathInfo, "text/html"));
response.setContentType("text/html"); // all string data in this case!
formResponse(response, sbresp);
if(doProfile) System.err.println("doGet S time: "+((System.nanoTime()-startTime)/1000000.)+" ms, chan: "+chan);
if(debug) System.err.println("CSV data response, length: "+sbresp.length());
return;
}
}
else {
// add header info about time limits even if no data
if(debug) System.err.println("No data for: "+pathInfo);
// JPW, in next 2 calls, change from sourcePath to source (ie, don't use full path)
// double oldTime = ctreader.oldTime(source,chan);
// double newTime = ctreader.newTime(source,chan);
double[] tlimits = ctreader.timeLimits(source, chan);
double oldTime = tlimits[0];
double newTime = tlimits[1];
double lagTime = ((double)System.currentTimeMillis()/1000.) - newTime;
formHeader(response, start, start+duration, oldTime, newTime, lagTime);
formResponse(response, null); // add CORS header even for error response
response.sendError(HttpServletResponse.SC_NOT_FOUND);
return;
}
}
}
} catch(Exception e) {
System.err.println("CTweb doGet Exception: "+e);
if(debug) e.printStackTrace();
}
if(debug) System.err.println("Unable to respond to: "+pathInfo);
formResponse(response, null); // add CORS header even for error response
response.sendError(HttpServletResponse.SC_NOT_FOUND);
return;
}
//---------------------------------------------------------------------------------
// doPut: store CT data on server

// Maximum single-buffer input file size (8 MiB).
static int BUFSIZ = 8 * 1048576; // max single-buffer input file size
// static int BUFSIZ = 65536;
// Shared read buffer for doPut; keep write-chunks big (put/get collision can get partial buffer).
// NOTE(review): sharing this buffer is only safe because doPut is declared synchronized.
static byte[] CTbuffer = new byte[BUFSIZ]; // keep write-chunks big (put/get collision can get partial buffer)
/**
 * POST is treated identically to PUT: both store CT data on the server.
 */
@Override
protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
    // if(debug) System.err.println("doPost! "+request.getRequestURI());
    doPut(request, response);
}
/**
 * Store CT data on the server.  The request body is written either directly to a
 * pre-formed CT time-folder (when the URL path already embeds a numeric time
 * segment) or via the CTwriter API, which builds the time-folder hierarchy itself.
 * Declared synchronized: writers and the shared CTbuffer are not otherwise protected.
 *
 * @param request  PUT/POST request; path is /Source[/...]/file, optional time params
 *                 ?t=sec or ?b=base-msec&amp;dt=msec&amp;i=frame-index
 * @param response servlet response (unused except implicitly for status)
 */
@Override
synchronized protected void doPut(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
    if (maxCTwriters == 0) {
        if (keepTime > 0.) maxCTwriters = 100;      // keepTime enables writers
        else {
            System.err.println("CTweb PUT not allowed: " + request);
            return;
        }
    }

    ServletInputStream in = request.getInputStream();
    String[] parse = request.getPathInfo().split("/");   // URLs use forward slash
    if (parse.length < 3) {                              // presume leading slash: /Source/file
        System.err.println("doPut source/chan parse error: " + request.getPathInfo());
        return;
    }

    // Build the on-disk destination folder from the multi-part source path.
    String folder = rootFolder;
    for (int i = 1; i < parse.length - 1; i++) {
        String f = parse[i];
        // Scaled timestamp of form 10x100, i.e. qty 10 of 100 units = 1000.
        // (the length check guards against empty segments from "//" in the URL,
        // which made the original charAt(0) throw)
        if (f.length() > 0 && Character.isDigit(f.charAt(0)) && f.contains("x")
                && Character.isDigit(f.charAt(f.length() - 1))) {
            String[] ff = f.split("x");
            int scaledTime = Math.round(Float.parseFloat(ff[0]) * Float.parseFloat(ff[1]));
            folder += File.separator + scaledTime;
        }
        else folder += File.separator + f;               // add multi-part source dirs
    }

    // Build the CT source name; a segment starting with a digit marks the start of
    // an embedded (client pre-formed) time-folder hierarchy.
    String source = "";
    boolean embeddedTime = false;                        // primitive boolean: no boxing
    for (int i = 1; i < parse.length - 1; i++) {
        source += parse[i];
        if (parse[i + 1].length() > 0 && Character.isDigit(parse[i + 1].charAt(0))) {
            embeddedTime = true;
            break;                                       // next segment is a time folder
        }
        source += File.separator;
    }
    String file = parse[parse.length - 1];

    // Security limit check on number of sources.
    if (!CTwriters.containsKey(source)) {
        if (CTwriters.size() >= maxCTwriters) {
            System.err.println("CTweb, no more CTwriters! (max=" + maxCTwriters + "), this: " + source);
            return;
        }
        CTwriter ctw = new CTwriter(rootFolder + File.separator + source, keepTime);
        ctw.setBlockMode(false, preCache);               // no pack, zip if precache set
        ctw.autoFlush(preCache ? 1000 : 0);              // autoflush if preCache
        ctw.autoSegment(1000);                           // was 1000
        CTwriters.put(source, ctw);
        if (debug) System.err.println("CTwriters.size: " + CTwriters.size() + ", add: " + source);
    }

    if (embeddedTime) {
        // Simply write file; presume pre-processed on client to correct CT folder/file structure.
        File targetFile = new File(folder + File.separator + file);
        if (debug) System.err.println("doPut, folder: " + folder + ", file: " + file
                + ", data.size: " + in.available() + ", targetFile: " + targetFile);
        BufferedOutputStream out;
        try {
            targetFile.getParentFile().mkdirs();         // create parent dirs if absent
            targetFile.createNewFile();
            out = new BufferedOutputStream(new FileOutputStream(targetFile, false), BUFSIZ);
        } catch (Exception e) {
            System.err.println("CTweb doPut, cannot create target file: " + targetFile + ", exception: " + e);
            return;
        }
        // Copy request body to file.  try/finally so both streams are closed (and the
        // file descriptor released) even when a read/write throws -- the original
        // leaked 'out' on any I/O error.
        try {
            int length;
            while ((length = in.read(CTbuffer)) > 0) {
                out.write(CTbuffer, 0, length);
            }
            out.flush();
        } finally {
            in.close();
            out.close();
        }
        // Trim (loop) old data if a keep-time is specified.
        if (keepTime > 0.) {
            double now = (double) (System.currentTimeMillis()) / 1000.;
            double checkDelta = 60.;                     // default is 60s
            if (checkDelta >= keepTime) checkDelta = keepTime;
            // NOTE(review): 'now' is epoch-seconds, so (now > checkDelta) is always
            // true; given lastTime is saved below, the throttle was presumably meant
            // to be (now - lastTime > checkDelta).  Left as-is pending confirmation.
            if (now > checkDelta) {                      // no thrash
                double oldTime = now - keepTime;
                String trimFolder = rootFolder + File.separator + source;
                if (debug)
                    System.err.println("dotrim, folder: " + trimFolder + ", now: " + now + ", oldTime: " + oldTime
                            + ", keepTime: " + keepTime + ", file: " + file + ", target: " + targetFile);
                try {
                    new CTwriter(trimFolder).dotrim(oldTime, false);
                } catch (IOException ioe) {
                    System.err.println("dotrim folder: " + trimFolder + ", now: " + now + ", oldTime: " + oldTime
                            + ", keepTime: " + keepTime + ", file: " + file + ", target: " + targetFile + ", source: " + source);
                }
                lastTime = now;
            }
        }
    }
    else { // use CTwriter API to construct time-folder hierarchy...
        try {
            CTwriter ctw = CTwriters.get(source);
            // Time may be specified as URL parameter ?t=1234 (seconds)...
            String param = request.getParameter("t");
            if (param != null) {
                double stime = Double.parseDouble(param);
                if (stime > 0) ctw.setTime(stime);
            }
            else { // ...or as ?b=1234567890123&dt=100&i=%d
                String base = request.getParameter("b"); // base (start) time integer-milliseconds since epoch
                String dt = request.getParameter("dt");  // delta-time per frame, integer-milliseconds
                String idx = request.getParameter("i");  // frame counter
                if (base != null && dt != null && idx != null) {
                    long stime = Long.parseLong(base) + Long.parseLong(dt) * Long.parseLong(idx);
                    if (stime > 0) ctw.setTime(stime);
                    if (debug) System.err.println("base: " + base + ", dt: " + dt + ", i: " + idx + ", stime: " + stime);
                }
            }
            // If not set (stime==0), by default ctw will auto time-of-day.
            byte[] buffer = new byte[BUFSIZ];
            int ngot = 0, nget = 0;
            while ((nget = in.read(buffer, ngot, (BUFSIZ - ngot))) > 0) {
                ngot += nget;
            }
            in.close();
            byte[] buffer2 = new byte[ngot];
            System.arraycopy(buffer, 0, buffer2, 0, ngot); // truncate (else ctw writes whole-size of buffer)
            ctw.putData(file, buffer2);
            if (!preCache) ctw.flush();
            if (debug) System.err.println("PUT source: " + source + ", file: " + file
                    + ", request: " + request.getRequestURI() + ", keepTime: " + keepTime);
        }
        catch (Exception e) {
            // Log the exception itself, not just getMessage() (which can be null).
            System.err.println("Error on PUT CTwrite! " + e);
        }
    }
}
}
//---------------------------------------------------------------------------------
/**
 * Add the CT time-bounds headers to the response.
 *
 * @param response  servlet response to decorate
 * @param startTime start time of the returned data (seconds since epoch)
 * @param endTime   end time of the returned data (seconds since epoch)
 * @param oldTime   oldest time available for this channel (seconds)
 * @param newTime   newest time available for this channel (seconds)
 * @param lagTime   server clock minus newest data time (seconds)
 */
private static void formHeader(HttpServletResponse response, double startTime, double endTime, double oldTime, double newTime, double lagTime) {
    response.addHeader("time", formatTime(startTime)); // sec
    // response.addHeader("time", formatTime(endTime)); // sec
    // RFC 1123 format for Last-Modified.  Date.toGMTString() is deprecated and omits
    // the day-of-week, which is not a valid HTTP/1.1 date.  Fully-qualified names are
    // used to avoid touching the file's import block.
    java.text.SimpleDateFormat httpDate =
            new java.text.SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss 'GMT'", java.util.Locale.US);
    httpDate.setTimeZone(java.util.TimeZone.getTimeZone("GMT"));
    response.addHeader("Last-Modified", httpDate.format(new Date((long)(1000*endTime)))); // msec
    double duration = endTime - startTime;
    response.addHeader("duration", formatTime(duration));
    response.addHeader("X-Duration", formatTime(duration)); // compatible with WebTurbine
    response.addHeader("oldest", formatTime(oldTime));
    response.addHeader("newest", formatTime(newTime));
    response.addHeader("lagtime", formatTime(lagTime));
    response.addHeader("cache-control", "private, max-age=3600"); // enable browser cache
    if(debug)
        System.err.println("+++CTweb: time: "+startTime+", endTime: "+endTime+", duration: "+duration+", oldest: "+oldTime+", newest: "+newTime+", hlag: "+lagTime);
}
//---------------------------------------------------------------------------------
/**
 * Attach the CORS headers that every CTweb reply carries (including error
 * replies), then optionally write the accumulated response body.
 *
 * @param resp   servlet response to decorate
 * @param sbresp response body, or null to send headers only
 */
private static void formResponse(HttpServletResponse resp, StringBuilder sbresp) {
    resp.addHeader("Access-Control-Allow-Origin", "*"); // CORS enable
    resp.addHeader("Access-Control-Allow-Methods", "GET, POST, HEAD, OPTIONS"); // CORS enable
    // resp.addHeader("Access-Control-Expose-Headers", "oldest,newest,duration,time,lagtime,folders");
    resp.addHeader("Access-Control-Expose-Headers", "oldest,newest,duration,time,lagtime");
    if (sbresp != null) {
        try {
            resp.getWriter().println(sbresp.toString());
        } catch (IOException ioe) {
            ioe.printStackTrace();
        }
    }
}
/**
 * Render a time value compactly: integral values without a decimal point
 * ("5" rather than "5.0"), everything else via Double.toString.
 *
 * @param time time value in seconds
 * @return compact string form
 */
private static String formatTime(double time) {
    long whole = (long) time;
    return (whole == time) ? Long.toString(whole) : Double.toString(time);
    // DecimalFormat("0.000") was rejected: it loses precision
}
/**
 * Append a standard HTML directory-listing header (title plus optional
 * "up one level" link) to the given response buffer.
 *
 * @param response buffer being built
 * @param path     path being listed (shown in the title)
 * @param uplevel  href for the parent listing, or null for none
 */
private static void printHeader(StringBuilder response, String path, String uplevel) {
    // The original wrapped these appends in catch(Exception): StringBuilder.append
    // cannot fail here, so that catch only hid programming errors -- removed.
    String title = "Directory listing for: "+path;
    response.append("<head><title>"+title+"</title></head>");
    if(uplevel != null) response.append("<a href=\""+uplevel+"\">[Up one level]</a><br>");
    response.append("<h3>"+title+"</h3>");
}
/**
 * Map a file name to a MIME content type by extension.
 *
 * @param fname   file/channel name
 * @param deftype type returned when no known extension matches
 * @return MIME type string
 */
private static String mimeType(String fname, String deftype) {
    String lname = fname.toLowerCase();   // lower-case once, not per comparison
    String mime = deftype;
    if      (lname.endsWith(".css")) mime = "text/css";
    else if (lname.endsWith(".js"))  mime = "application/javascript";
    else if (lname.endsWith(".jpg")) mime = "image/jpeg";
    else if (lname.endsWith(".png")) mime = "image/png";
    else if (lname.endsWith(".wav")) mime = "audio/wav";
    else if (lname.endsWith(".csv")) mime = "text/csv"; // was "text/css" (copy/paste bug)
    if(debug) System.err.println("fname: "+fname+", mime type: "+mime);
    return mime;
}
//---------------------------------------------------------------------------------
// Scale image.jpg to smaller size to save bandwidth
/**
 * Down-scale a JPEG by the integer factor scaleImage, using progressive halving
 * with bilinear interpolation (repeated halving into a scratch image gives
 * better quality than one large rescale), then re-encode as JPEG.
 *
 * @param bdata      original JPEG bytes
 * @param scaleImage integer reduction factor; &lt;=1 returns the input unchanged
 * @return re-encoded JPEG bytes at (width/scaleImage x height/scaleImage)
 * @throws Exception on decode/encode failure
 */
private static byte[] scale(byte[] bdata, int scaleImage) throws Exception {
    if(scaleImage <= 1) return bdata;           // nothing to do
    BufferedImage img=ImageIO.read(new ByteArrayInputStream(bdata));
    int targetWidth = img.getWidth()/scaleImage;
    int targetHeight = img.getHeight()/scaleImage;
    // Preserve alpha only if the source has it.
    int type = (img.getTransparency() == Transparency.OPAQUE) ? BufferedImage.TYPE_INT_RGB : BufferedImage.TYPE_INT_ARGB;
    BufferedImage ret = img;
    BufferedImage scratchImage = null;
    Graphics2D g2 = null;
    int w = img.getWidth();
    int h = img.getHeight();
    int prevW = w;
    int prevH = h;
    // Halve repeatedly toward the target; the scratch image is allocated once at
    // the first (largest) intermediate size and each pass draws into its
    // shrinking top-left subregion.
    do {
        if (w > targetWidth) {
            w /= 2;
            w = (w < targetWidth) ? targetWidth : w;    // clamp final step to target
        }
        if (h > targetHeight) {
            h /= 2;
            h = (h < targetHeight) ? targetHeight : h;
        }
        if (scratchImage == null) {
            scratchImage = new BufferedImage(w, h, type);
            g2 = scratchImage.createGraphics();
        }
        g2.setRenderingHint(RenderingHints.KEY_INTERPOLATION, RenderingHints.VALUE_INTERPOLATION_BILINEAR);
        g2.drawImage(ret, 0, 0, w, h, 0, 0, prevW, prevH, null);
        prevW = w;
        prevH = h;
        ret = scratchImage;
    }
    while (w != targetWidth || h != targetHeight);
    if (g2 != null) g2.dispose();
    // The scratch image is still at its first-pass size; crop/copy to the exact
    // target dimensions when they differ.
    if (targetWidth != ret.getWidth() || targetHeight != ret.getHeight()) {
        scratchImage = new BufferedImage(targetWidth, targetHeight, type);
        g2 = scratchImage.createGraphics();
        g2.drawImage(ret, 0, 0, null);
        g2.dispose();
        ret = scratchImage;
    }
    // Re-encode as JPEG.
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    ImageIO.write( ret, "jpg", baos );
    baos.flush();
    byte[] bdata2 = baos.toByteArray();
    baos.close();
    if(debug) System.err.println("Scale image "+scaleImage+"x, "+bdata.length+" to "+bdata2.length+" bytes");
    return bdata2;
}
//---------------------------------------------------------------------------------
/**
 * Load server properties from CTwebPropsFile (if one was configured) into the
 * CTwebProps table.  A missing or unreadable file logs a warning only.
 */
private static void loadProps() {
    if(CTwebPropsFile == null) return;          // no props file configured
    if(CTwebProps==null) CTwebProps = new Properties();
    // try-with-resources: the original leaked the stream when Properties.load() threw.
    try (InputStream is = new FileInputStream(CTwebPropsFile)) {
        CTwebProps.load(is);
    }
    catch(IOException ioe) {
        System.err.println("Warning: could not load properties file: "+CTwebPropsFile);
    }
}
}
| apache-2.0 |
youdonghai/intellij-community | platform/util/src/com/intellij/util/messages/Topic.java | 5012 | /*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Created by IntelliJ IDEA.
* User: max
* Date: Oct 22, 2006
* Time: 9:49:16 PM
*/
package com.intellij.util.messages;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
/**
* Defines messaging endpoint within particular {@link MessageBus bus}.
*
* @param <L> type of the interface that defines contract for working with the particular topic instance
*/
public class Topic<L> {
private final String myDisplayName;
private final Class<L> myListenerClass;
private final BroadcastDirection myBroadcastDirection;
public Topic(@NonNls @NotNull String displayName, @NotNull Class<L> listenerClass) {
this(displayName, listenerClass, BroadcastDirection.TO_CHILDREN);
}
public Topic(@NonNls @NotNull String displayName, @NotNull Class<L> listenerClass, final BroadcastDirection broadcastDirection) {
myDisplayName = displayName;
myListenerClass = listenerClass;
myBroadcastDirection = broadcastDirection;
}
/**
* @return human-readable name of the current topic. Is intended to be used in informational/logging purposes only
*/
@NotNull
@NonNls
public String getDisplayName() {
return myDisplayName;
}
/**
* Allows to retrieve class that defines contract for working with the current topic. Either publishers or subscribers use it:
* <ul>
* <li>
* publisher {@link MessageBus#syncPublisher(Topic) receives} object that IS-A target interface from the messaging infrastructure.
* It calls target method with the target arguments on it then (method of the interface returned by the current method);
* </li>
* <li>
* the same method is called on handlers of all {@link MessageBusConnection#subscribe(Topic, Object) subscribers} that
* should receive the message;
* </li>
* </ul>
*
* @return class of the interface that defines contract for working with the current topic
*/
@NotNull
public Class<L> getListenerClass() {
return myListenerClass;
}
public String toString() {
return myDisplayName;
}
public static <L> Topic<L> create(@NonNls @NotNull String displayName, @NotNull Class<L> listenerClass) {
return new Topic<L>(displayName, listenerClass);
}
public static <L> Topic<L> create(@NonNls @NotNull String displayName, @NotNull Class<L> listenerClass, BroadcastDirection direction) {
return new Topic<L>(displayName, listenerClass, direction);
}
/**
* @return broadcasting strategy configured for the current topic. Default value is {@link BroadcastDirection#TO_CHILDREN}
* @see BroadcastDirection
*/
public BroadcastDirection getBroadcastDirection() {
return myBroadcastDirection;
}
/**
* {@link MessageBus Message buses} may be organised into {@link MessageBus#getParent() hierarchies}. That allows to provide
* additional messaging features like {@code 'broadcasting'}. Here it means that messages sent to particular topic within
* particular message bus may be dispatched to subscribers of the same topic within another message buses.
* <p/>
* Current enum holds available broadcasting options.
*/
public enum BroadcastDirection {
/**
* The message is dispatched to all subscribers of the target topic registered within the child message buses.
* <p/>
* Example:
* <pre>
* parent-bus <--- topic1
* / \
* / \
* topic1 ---> child-bus1 child-bus2 <--- topic1
* </pre>
* <p/>
* Here subscribers of the {@code 'topic1'} registered within the {@code 'child-bus1'} and {@code 'child-bus2'}
* will receive the message sent to the {@code 'topic1'} topic at the {@code 'parent-bus'}.
*/
TO_CHILDREN,
/**
* No broadcasting is performed for the
*/
NONE,
/**
* The message send to particular topic at particular bus is dispatched to all subscribers of the same topic within the parent bus.
* <p/>
* Example:
* <pre>
* parent-bus <--- topic1
* |
* child-bus <--- topic1
* </pre>
* <p/>
* Here subscribers of the {@code topic1} registered within {@code 'parent-bus'} will receive messages posted
* to the same topic within {@code 'child-bus'}.
*/
TO_PARENT
}
} | apache-2.0 |
devjin24/howtomcatworks | bookrefer/jakarta-tomcat-4.1.12-src/catalina/src/share/org/apache/catalina/connector/http/HttpResponseStream.java | 7918 | /*
* $Header: /home/cvs/jakarta-tomcat-4.0/catalina/src/share/org/apache/catalina/connector/http/HttpResponseStream.java,v 1.14 2002/03/18 07:15:40 remm Exp $
* $Revision: 1.14 $
* $Date: 2002/03/18 07:15:40 $
*
* ====================================================================
*
* The Apache Software License, Version 1.1
*
* Copyright (c) 1999 The Apache Software Foundation. All rights
* reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. The end-user documentation included with the redistribution, if
* any, must include the following acknowlegement:
* "This product includes software developed by the
* Apache Software Foundation (http://www.apache.org/)."
* Alternately, this acknowlegement may appear in the software itself,
* if and wherever such third-party acknowlegements normally appear.
*
* 4. The names "The Jakarta Project", "Tomcat", and "Apache Software
* Foundation" must not be used to endorse or promote products derived
* from this software without prior written permission. For written
* permission, please contact apache@apache.org.
*
* 5. Products derived from this software may not be called "Apache"
* nor may "Apache" appear in their names without prior written
* permission of the Apache Group.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
* ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
* [Additional notices, if required by prior licensing conditions]
*
*/
package org.apache.catalina.connector.http;
import java.io.IOException;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpServletRequest;
import org.apache.catalina.Response;
import org.apache.catalina.connector.ResponseStream;
/**
* Response stream for the HTTP/1.1 connector. This stream will automatically
* chunk the answer if using HTTP/1.1 and no Content-Length has been properly
* set.
*
* @author <a href="mailto:remm@apache.org">Remy Maucherat</a>
* @deprecated
*/
public final class HttpResponseStream extends ResponseStream {


    // ------------------------------------------------------------- Constants


    // NOTE(review): neither constant below is referenced anywhere in this class.
    private static final int MAX_CHUNK_SIZE = 4096;

    private static final String CRLF = "\r\n";


    // ----------------------------------------------------------- Constructors


    /**
     * Construct a servlet output stream associated with the specified Request.
     * Whether to chunk, and whether to suppress the body (HEAD request), are
     * decided once here from the response state.
     *
     * @param response The associated response
     */
    public HttpResponseStream(HttpResponseImpl response) {

        super(response);
        checkChunking(response);
        checkHead(response);

    }


    // ----------------------------------------------------- Instance Variables


    /**
     * True if chunked transfer-encoding is in effect for this response.
     */
    private boolean useChunking;


    /**
     * True while the chunk framing itself is being written; the print()/println()
     * calls used for chunk headers funnel back into write(), and this flag keeps
     * them from being re-chunked recursively.
     */
    private boolean writingChunk;


    /**
     * True if response content should be written; set false for HEAD requests,
     * whose body must be suppressed.
     * (NOTE(review): the original comment here stated the inverse.)
     */
    private boolean writeContent;


    // -------------------------------------------- ServletOutputStream Methods


    /**
     * Write the specified byte to our output stream.  In chunked mode each byte
     * becomes its own 1-byte chunk: "1" CRLF, the byte, CRLF.
     *
     * @param b The byte to be written
     *
     * @exception IOException if an input/output error occurs
     */
    public void write(int b)
        throws IOException {

        if (suspended)
            return;

        if (!writeContent)
            return;

        if (useChunking && !writingChunk) {
            writingChunk = true;
            try {
                print("1\r\n");    // chunk-size line (hex length = 1)
                super.write(b);
                println();         // CRLF terminating the chunk data
            } finally {
                writingChunk = false;
            }
        } else {
            super.write(b);
        }

    }


    /**
     * Write len bytes from the specified byte array as a single chunk (or
     * verbatim when not chunking).  Zero-length writes emit nothing: an empty
     * chunk would prematurely terminate the chunked body.
     */
    public void write(byte[] b, int off, int len)
        throws IOException {

        if (suspended)
            return;

        if (!writeContent)
            return;

        if (useChunking && !writingChunk) {
            if (len > 0) {
                writingChunk = true;
                try {
                    println(Integer.toHexString(len));  // chunk-size line (hex)
                    super.write(b, off, len);
                    println();                          // chunk-data CRLF
                } finally {
                    writingChunk = false;
                }
            }
        } else {
            super.write(b, off, len);
        }

    }


    /**
     * Close this output stream, causing any buffered data to be flushed and
     * any further output data to throw an IOException.  In chunked mode the
     * zero-length terminating chunk ("0" CRLF CRLF) is emitted first.
     */
    public void close() throws IOException {

        if (suspended)
            throw new IOException
                (sm.getString("responseStream.suspended"));

        if (!writeContent)
            return;

        if (useChunking) {
            // Write the final (terminating) chunk.
            writingChunk = true;
            try {
                print("0\r\n\r\n");
            } finally {
                writingChunk = false;
            }
        }
        super.close();

    }


    // -------------------------------------------------------- Package Methods


    /**
     * Decide whether this response should use chunked transfer-encoding and set
     * the Transfer-Encoding header accordingly.  Chunking applies when the
     * response is uncommitted, has no Content-Length, is not a 304, the
     * connector allows it, and the connection is not about to close.
     */
    void checkChunking(HttpResponseImpl response) {
        // If any data has already been written to the stream, we must not
        // change the chunking mode
        if (count != 0)
            return;
        // Check the basic cases in which we chunk
        useChunking =
            (!response.isCommitted()
             && response.getContentLength() == -1
             && response.getStatus() != HttpServletResponse.SC_NOT_MODIFIED);
        if (!response.isChunkingAllowed() && useChunking) {
            // If we should chunk, but chunking is forbidden by the connector,
            // we close the connection
            response.setHeader("Connection", "close");
        }
        // Don't chunk if the connection will be closed
        useChunking = (useChunking && !response.isCloseConnection());
        if (useChunking) {
            response.setHeader("Transfer-Encoding", "chunked");
        } else if (response.isChunkingAllowed()) {
            response.removeHeader("Transfer-Encoding", "chunked");
        }
    }


    /**
     * Suppress the response body when the request method is HEAD.
     */
    protected void checkHead(HttpResponseImpl response) {
        HttpServletRequest servletRequest =
            (HttpServletRequest) response.getRequest();
        if ("HEAD".equals(servletRequest.getMethod())) {
            writeContent = false;
        } else {
            writeContent = true;
        }
    }


}
| apache-2.0 |
atbashEE/jsf-renderer-extensions | jerry/src/main/java/be/atbash/ee/jsf/jerry/component/ComponentInitializerManager.java | 5084 | /*
* Copyright 2014-2018 Rudy De Busscher
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package be.atbash.ee.jsf.jerry.component;
import be.atbash.ee.jsf.jerry.storage.ComponentStorage;
import be.atbash.ee.jsf.jerry.utils.InvocationOrderedArtifactsProvider;
import javax.annotation.PostConstruct;
import javax.enterprise.context.ApplicationScoped;
import javax.faces.component.UIComponent;
import javax.faces.component.UIData;
import javax.faces.context.FacesContext;
import javax.inject.Inject;
import java.util.List;
import java.util.Map;
/**
*
*/
@ApplicationScoped
public class ComponentInitializerManager {
// Attribute key marking components that sit inside a repeating parent
// (ui:repeat / UIData).  NOTE(review): effectively a constant -- could be
// declared final; confirm it is never reassigned elsewhere in this class.
private static String REPEATED_COMPONENT = ComponentInitializerManager.class.getName() + ".REPEATED_COMPONENT";
// ComponentInitializers resolved once at startup (see init()).
private List<ComponentInitializer> initializers;
@Inject
private ComponentStorage componentStorage;
@PostConstruct
public void init() {
    // Resolve the ComponentInitializer artifacts once per application.
    // NOTE(review): the provider name suggests they come back in invocation
    // order -- confirm against InvocationOrderedArtifactsProvider.
    initializers = InvocationOrderedArtifactsProvider.getComponentInitializers();
}
public void performInitialization(FacesContext facesContext, UIComponent uiComponent) {
if (notAlreadyInitialized(uiComponent)) {
boolean hasInitializer = performInit(facesContext, uiComponent);
if (!hasInitializer) {
// No initializer who matches the component, don't try again.
setInitialized(uiComponent);
} else {
if (!isRepeated(uiComponent)) {
// Initializer but not repeated, so only once is OK.
setInitialized(uiComponent);
}
}
}
}
private boolean isRepeated(UIComponent uiComponent) {
boolean result = checkRepeatableInitializer(uiComponent);
if (!result) {
// No RepeatableComponentInitializer used, check if it is within ui:repeat or UIData component.
// See if we have determined the RepeatedComponent stuff before.
result = checkRepeatedComponentFlag(uiComponent);
if (!result) {
// Determine the RepeatedComponent stuff
result = checkRepeatedComponent(uiComponent);
if (result) {
// Keep the flag for further reference.
setRepeatedComponentFlag(uiComponent);
}
}
}
return result;
}
private boolean checkRepeatedComponent(UIComponent uiComponent) {
boolean result = uiComponent.getClass().getName().endsWith(".UIRepeat")
|| (uiComponent instanceof UIData);
if (!result && uiComponent.getParent() != null) {
result = checkRepeatedComponent(uiComponent.getParent());
}
return result;
}
private void setInitialized(UIComponent uiComponent) {
uiComponent.getAttributes().put(ComponentInitializer.class.getName(), Boolean.TRUE);
}
private boolean notAlreadyInitialized(UIComponent uiComponent) {
return !uiComponent.getAttributes().containsKey(ComponentInitializer.class.getName());
}
private boolean performInit(FacesContext facesContext, UIComponent uiComponent) {
String viewId = facesContext.getViewRoot().getViewId();
String clientId = uiComponent.getClientId(facesContext);
Map<String, Object> componentInfo = componentStorage.getComponentInfo(viewId, clientId);
boolean hasInitializer = false;
for (ComponentInitializer initializer : initializers) {
if (initializer.isSupportedComponent(uiComponent)) {
initializer.configureComponent(facesContext, uiComponent, componentInfo);
hasInitializer = true;
if (initializer instanceof RepeatableComponentInitializer) {
setRepeatableInitializer(uiComponent);
}
}
}
return hasInitializer;
}
private void setRepeatableInitializer(UIComponent uiComponent) {
uiComponent.getAttributes().put(RepeatableComponentInitializer.class.getName(), Boolean.TRUE);
}
private boolean checkRepeatableInitializer(UIComponent uiComponent) {
return uiComponent.getAttributes().containsKey(RepeatableComponentInitializer.class.getName());
}
private void setRepeatedComponentFlag(UIComponent uiComponent) {
uiComponent.getAttributes().put(REPEATED_COMPONENT, Boolean.TRUE);
}
private boolean checkRepeatedComponentFlag(UIComponent uiComponent) {
return uiComponent.getAttributes().containsKey(REPEATED_COMPONENT);
}
}
| apache-2.0 |
cf0566/CarMarket | src/com/easemob/chatuidemo/UserProfileManager.java | 4884 | package com.easemob.chatuidemo;
import java.util.ArrayList;
import java.util.List;
import android.content.Context;
import com.easemob.EMValueCallBack;
import com.easemob.applib.controller.HXSDKHelper.HXSyncListener;
import com.easemob.applib.utils.HXPreferenceUtils;
import com.easemob.chat.EMChat;
import com.easemob.chat.EMChatManager;
import com.easemob.chatuidemo.domain.User;
import com.easemob.chatuidemo.parse.ParseManager;
/**
 * Manages the local user's profile (nick and avatar) and synchronisation of
 * contact profile info with the Parse backend.
 * NOTE(review): callback ordering and the isSyncingContactInfosWithServer guard
 * are SDK-lifecycle sensitive; code intentionally left untouched, comments only.
 */
public class UserProfileManager {
    /**
     * application context
     */
    protected Context appContext = null;
    /**
     * init flag: test if the sdk has been inited before, we don't need to init
     * again
     */
    private boolean sdkInited = false;
    /**
     * HuanXin sync contact nick and avatar listener
     */
    private List<HXSyncListener> syncContactInfosListeners;
    // Guard flag: at most one server fetch of contact infos runs at a time.
    private boolean isSyncingContactInfosWithServer = false;
    // Lazily built snapshot of the logged-in user; cleared by reset() on logout.
    private User currentUser;

    public UserProfileManager() {
    }

    /**
     * Initializes ParseManager and the listener list.
     * Safe to call repeatedly; only the first call does work (guarded by
     * sdkInited). Always returns true.
     */
    public synchronized boolean onInit(Context context) {
        if (sdkInited) {
            return true;
        }
        ParseManager.getInstance().onInit(context);
        syncContactInfosListeners = new ArrayList<HXSyncListener>();
        sdkInited = true;
        return true;
    }

    // Registers a sync listener; null and duplicate registrations are ignored.
    public void addSyncContactInfoListener(HXSyncListener listener) {
        if (listener == null) {
            return;
        }
        if (!syncContactInfosListeners.contains(listener)) {
            syncContactInfosListeners.add(listener);
        }
    }

    // Unregisters a previously added sync listener; null is ignored.
    public void removeSyncContactInfoListener(HXSyncListener listener) {
        if (listener == null) {
            return;
        }
        if (syncContactInfosListeners.contains(listener)) {
            syncContactInfosListeners.remove(listener);
        }
    }

    /**
     * Fetches profile info for the given usernames from the Parse backend.
     * Silently returns (without invoking the callback) when a fetch is already
     * in flight. The guard flag is cleared again on both success and error.
     */
    public void asyncFetchContactInfosFromServer(List<String> usernames, final EMValueCallBack<List<User>> callback) {
        if (isSyncingContactInfosWithServer) {
            return;
        }
        isSyncingContactInfosWithServer = true;
        ParseManager.getInstance().getContactInfos(usernames, new EMValueCallBack<List<User>>() {
            @Override
            public void onSuccess(List<User> value) {
                isSyncingContactInfosWithServer = false;
                // in case that logout already before server returns,we should
                // return immediately
                if (!EMChat.getInstance().isLoggedIn()) {
                    return;
                }
                if (callback != null) {
                    callback.onSuccess(value);
                }
            }

            @Override
            public void onError(int error, String errorMsg) {
                isSyncingContactInfosWithServer = false;
                if (callback != null) {
                    callback.onError(error, errorMsg);
                }
            }
        });
    }

    // Notifies all registered listeners whether the sync succeeded.
    public void notifyContactInfosSyncListener(boolean success) {
        for (HXSyncListener listener : syncContactInfosListeners) {
            listener.onSyncSucess(success);
        }
    }

    public boolean isSyncingContactInfoWithServer() {
        return isSyncingContactInfosWithServer;
    }

    // Called on logout: clears the cached user, the in-flight flag and the
    // persisted user info in the preferences.
    synchronized void reset() {
        isSyncingContactInfosWithServer = false;
        currentUser = null;
        HXPreferenceUtils.getInstance().removeCurrentUserInfo();
    }

    /**
     * Returns the cached current-user snapshot, building it on first access
     * from the chat SDK username plus the locally persisted nick/avatar.
     * Falls back to the username when no nick is stored.
     */
    public synchronized User getCurrentUserInfo() {
        if (currentUser == null) {
            String username = EMChatManager.getInstance().getCurrentUser();
            currentUser = new User(username);
            String nick = getCurrentUserNick();
            currentUser.setNick((nick != null) ? nick : username);
            currentUser.setAvatar(getCurrentUserAvatar());
        }
        return currentUser;
    }

    /**
     * Pushes the new nickname to Parse (blocking); only updates the local
     * cache/preferences when the server update succeeded.
     */
    public boolean updateParseNickName(final String nickname) {
        boolean isSuccess = ParseManager.getInstance().updateParseNickName(nickname);
        if (isSuccess) {
            setCurrentUserNick(nickname);
        }
        return isSuccess;
    }

    /**
     * Uploads avatar bytes to Parse (blocking) and caches the returned URL
     * locally on success.
     *
     * @return the avatar URL, or null when the upload failed
     */
    public String uploadUserAvatar(byte[] data) {
        String avatarUrl = ParseManager.getInstance().uploadParseAvatar(data);
        if (avatarUrl != null) {
            setCurrentUserAvatar(avatarUrl);
        }
        return avatarUrl;
    }

    // Refreshes the local nick/avatar cache from the server; errors are ignored.
    public void asyncGetCurrentUserInfo() {
        ParseManager.getInstance().asyncGetCurrentUserInfo(new EMValueCallBack<User>() {
            @Override
            public void onSuccess(User value) {
                setCurrentUserNick(value.getNick());
                setCurrentUserAvatar(value.getAvatar());
            }

            @Override
            public void onError(int error, String errorMsg) {
                // Best-effort refresh: failures are deliberately swallowed here.
            }
        });
    }

    // Fetches another user's profile; results go straight to the caller.
    public void asyncGetUserInfo(final String username,final EMValueCallBack<User> callback){
        ParseManager.getInstance().asyncGetUserInfo(username, callback);
    }

    // Updates both the in-memory snapshot and the persisted preference value.
    private void setCurrentUserNick(String nickname) {
        getCurrentUserInfo().setNick(nickname);
        HXPreferenceUtils.getInstance().setCurrentUserNick(nickname);
    }

    private void setCurrentUserAvatar(String avatar) {
        getCurrentUserInfo().setAvatar(avatar);
        HXPreferenceUtils.getInstance().setCurrentUserAvatar(avatar);
    }

    private String getCurrentUserNick() {
        return HXPreferenceUtils.getInstance().getCurrentUserNick();
    }

    private String getCurrentUserAvatar() {
        return HXPreferenceUtils.getInstance().getCurrentUserAvatar();
    }
}
| apache-2.0 |
jimmidyson/quickstarts | apps/jbpm-designer/src/test/java/io/fabric8/apps/jbpm/designer/JbpmDesignerKubernetesTest.java | 1902 | /*
* Copyright 2005-2015 Red Hat, Inc.
*
* Red Hat licenses this file to you under the Apache License, version
* 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package io.fabric8.apps.jbpm.designer;
import io.fabric8.arquillian.kubernetes.Constants;
import io.fabric8.arquillian.kubernetes.Session;
import io.fabric8.kubernetes.api.KubernetesClient;
import io.fabric8.kubernetes.api.model.Pod;
import org.assertj.core.api.Condition;
import org.jboss.arquillian.junit.Arquillian;
import org.jboss.arquillian.test.api.ArquillianResource;
import org.junit.Test;
import org.junit.runner.RunWith;
import static io.fabric8.kubernetes.assertions.Assertions.assertThat;
@RunWith(Arquillian.class)
public class JbpmDesignerKubernetesTest {

    @ArquillianResource
    KubernetesClient client;

    @ArquillianResource
    Session session;

    /**
     * Verifies the jbpm-designer deployment: a replication controller exists,
     * the service exposes port 80, and at least one pod is running in the
     * session's namespace.
     * Renamed from testInfluxDB — the old name was a copy/paste leftover that
     * did not describe what is asserted here. JUnit discovers the method via
     * the @Test annotation, so no caller references the old name.
     */
    @Test
    public void testJbpmDesigner() throws Exception {
        String serviceName = "jbpm-designer";
        assertThat(client).replicationController(serviceName).isNotNull();
        assertThat(client).hasServicePort(serviceName, 80);
        assertThat(client).pods()
                .runningStatus()
                .filterNamespace(session.getNamespace())
                .haveAtLeast(1, new Condition<Pod>() {
                    @Override
                    public boolean matches(Pod podSchema) {
                        // Any running pod in the namespace counts.
                        return true;
                    }
                });
    }
}
| apache-2.0 |
ning/serialization | writer/src/main/java/com/ning/metrics/serialization/writer/ObjectOutputterFactory.java | 2451 | /*
* Copyright 2010-2011 Ning, Inc.
*
* Ning licenses this file to you under the Apache License, version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.ning.metrics.serialization.writer;
import com.ning.metrics.serialization.event.EventSerializer;
import java.io.FileOutputStream;
import java.io.IOException;
/**
 * Static factory for {@link ObjectOutputter} instances keyed by {@link SyncType}.
 */
public class ObjectOutputterFactory
{
    // Utility class: static factory methods only, no instances.
    private ObjectOutputterFactory()
    {
    }

    /**
     * Creates an outputter that serializes events with the default
     * {@link ObjectOutputEventSerializer}.
     *
     * @param out underlying FileOutputStream
     * @param type type of outputter (flush, sync, ...)
     * @param batchSize number of events between flushes or syncs
     * @return a new outputter object of type type
     * @throws IOException when unable to open the FileOutputStream out
     */
    public static ObjectOutputter createObjectOutputter(final FileOutputStream out, final SyncType type, final int batchSize) throws IOException
    {
        return createObjectOutputter(out, type, batchSize, new ObjectOutputEventSerializer());
    }

    /**
     * @param out underlying FileOutputStream
     * @param type type of outputter (flush, sync, ...)
     * @param batchSize number of events between flushes or syncs
     * @param eventSerializer does not have to be tied to 'out'. We will call eventSerializer.open(out) later.
     *                        If eventSerializer == null, it's the same as calling the default createObjectOutputter()
     * @return a new outputter object of type type
     * @throws IOException when unable to open the FileOutputStream out
     */
    public static ObjectOutputter createObjectOutputter(
        final FileOutputStream out,
        final SyncType type,
        final int batchSize,
        final EventSerializer eventSerializer
    ) throws IOException
    {
        if (eventSerializer == null) {
            return createObjectOutputter(out, type, batchSize);
        }
        switch (type) {
            case NONE:
                return new DefaultObjectOutputter(out, eventSerializer);
            case FLUSH:
                return new FlushingObjectOutputter(out, eventSerializer, batchSize);
            case SYNC:
                return new SyncingObjectOutputter(out, eventSerializer, batchSize);
            default:
                // Defensive: any SyncType added later must be handled explicitly.
                // Fix: original message read "given typeNONE" (missing space).
                throw new IllegalArgumentException("Unable to construct ObjectOutputter given type " + type);
        }
    }
}
| apache-2.0 |
vipien/traccar | test/org/traccar/protocol/Gps103ProtocolDecoderTest.java | 8821 | package org.traccar.protocol;
import org.junit.Test;
import org.traccar.ProtocolTest;
/**
 * Decoder tests for the GPS103 text protocol.
 * Each message below is a real device frame; the literals must stay
 * byte-exact, since the decoder is driven purely by their content.
 */
public class Gps103ProtocolDecoderTest extends ProtocolTest {

    @Test
    public void testDecode() throws Exception {

        Gps103ProtocolDecoder decoder = new Gps103ProtocolDecoder(new Gps103Protocol());

        // Frames without a usable position fix: only attributes are checked.
        verifyAttributes(decoder, text(
            "imei:862106021237716,ac alarm,1611291645,,F,204457.000,A,1010.2783,N,06441.0274,W,0.00,,;"));

        verifyAttributes(decoder, text(
            "imei:359710049057798,OBD,161003192752,1785,,,0,54,96.47%,75,20.00%,1892,0.00,P0134,P0571,,;"));

        verifyAttributes(decoder, text(
            "imei:359710049090138,OBD,160629022949,51442,0.00,15.88,5632,122,40.39%,95,0.00%,2101,13.80,,,,;"));

        verifyPosition(decoder, text(
            "imei:359710049090138,tracker,160629022948,,F,182949.000,A,4043.8839,N,11328.8029,W,65.26,271.82,,1,0,31.37%,51442,;"));

        verifyAttributes(decoder, text(
            "imei:359710049042014,001,160615040011,,F,040011.000,A,2833.0957,N,07711.9465,E,0.01,215.33,,0,,,,;"));

        verifyAttributes(decoder, text(
            "imei:359710049028435,OBD,160316053657,70430,,,0,49,60.00%,46,19.22%,859,0.00,U1108,,,;"));

        verifyPosition(decoder, text(
            "359769031878322imei:359769031878322,tracker,1602160718,2,F,221811.000,A,1655.2193,S,14546.6722,E,0.00,,"));

        // Incomplete OBD frame: decoder is expected to produce nothing.
        verifyNothing(decoder, text(
            "imei:865328021049167,OBD,141118115036,,,0.0,,000,0.0%,+,0.0%,00000,,,,,"));

        verifyAttributes(decoder, text(
            "imei:359710049032874,OBD,160208152900,13555,,,45,0,24.71%,35,13.73%,1230,14.13,U1108,,,"));

        verifyAttributes(decoder, text(
            "imei:359710049064398,OBD,160101035156,17887,0.00,17.06,0,0,0.00%,0,0.00%,16383,10.82,,,,"));

        verifyPosition(decoder, text(
            "imei:868683020235846,rfid,160202091347,49121185,F,011344.000,A,0447.7273,N,07538.9934,W,0.00,0,,0,0,0.00%,,"));

        // LBS-only frames ('L'): decoded, but without GPS coordinates.
        verifyNotNull(decoder, text(
            "imei:359710049075097,help me,,,L,,,113b,,558f,,,,,0,0,,,"));

        verifyNotNull(decoder, text(
            "imei:359710041100000,tracker,000000000,,L,,,fa8,,c9af,,,,,0,0,0.00%,,"));

        verifyNotNull(decoder, text(
            "imei:863070016871385,tracker,0000000119,,L,,,0FB6,,CB5D,,,"));

        // Frames with expected coordinates pinned via position(...).
        verifyPosition(decoder, text(
            "imei:359710045559474,tracker,151030080103,,F,000101.000,A,5443.3834,N,02512.9071,E,0.00,0;"),
            position("2015-10-30 00:01:01.000", true, 54.72306, 25.21512));

        verifyPosition(decoder, text(
            "imei:359710049092324,tracker,151027025958,,F,235957.000,A,2429.5156,N,04424.5828,E,0.01,27.91,,0,0,,,;"),
            position("2015-10-26 23:59:57.000", true, 24.49193, 44.40971));

        verifyPosition(decoder, text(
            "imei:865328021058861,tracker,151027041419,,F,011531.000,A,6020.2979,N,02506.1940,E,0.49,113.30,,0,0,0.0%,,;"),
            position("2015-10-27 01:15:31.000", true, 60.33830, 25.10323));

        // Log on request
        verifyNothing(decoder, text(
            "##,imei:359586015829802,A"));

        // Heartbeat package
        verifyNothing(decoder, text(
            "359586015829802"));

        // No GPS signal
        verifyNothing(decoder, text(
            "imei:359586015829802,tracker,000000000,13554900601,L,;"));

        verifyPosition(decoder, text(
            "imei:869039001186913,tracker,1308282156,0,F,215630.000,A,5602.11015,N,9246.30767,E,1.4,,175.9,"));

        verifyPosition(decoder, text(
            "imei:359710040656622,tracker,13/02/27 23:40,,F,125952.000,A,3450.9430,S,13828.6753,E,0.00,0"));

        verifyPosition(decoder, text(
            "imei:359710040565419,tracker,13/05/25 14:23,,F,062209.000,A,0626.0411,N,10149.3904,E,0.00,0"));

        verifyPosition(decoder, text(
            "imei:353451047570260,tracker,1302110948,,F,144807.000,A,0805.6615,S,07859.9763,W,0.00,,"));

        verifyPosition(decoder, text(
            "imei:359587016817564,tracker,1301251602,,F,080251.000,A,3223.5832,N,11058.9449,W,0.03,"));

        verifyPosition(decoder, text(
            "imei:359587016817564,tracker,1301251602,,F,080251.000,A,3223.5832,N,11058.9449,W,,"));

        verifyPosition(decoder, text(
            "imei:012497000208821,tracker,1301080525,,F,212511.000,A,2228.5279,S,06855.6328,W,18.62,268.98,"));

        verifyPosition(decoder, text(
            "imei:012497000208821,tracker,1301072224,,F,142411.077,A,2227.0739,S,06855.2912,,0,0,"));

        verifyPosition(decoder, text(
            "imei:012497000431811,tracker,1210260609,,F,220925.000,A,0845.5500,N,07024.7673,W,0.00,,"));

        verifyPosition(decoder, text(
            "imei:100000000000000,help me,1004171910,,F,010203.000,A,0102.0003,N,00102.0003,E,1.02,"));

        verifyPosition(decoder, text(
            "imei:353451040164707,tracker,1105182344,+36304665439,F,214418.000,A,4804.2222,N,01916.7593,E,0.37,"));

        verifyPosition(decoder, text(
            "imei:353451042861763,tracker,1106132241,,F,144114.000,A,2301.9052,S,04909.3676,W,0.13,"));

        verifyPosition(decoder, text(
            "imei:359587010124900,tracker,0809231929,13554900601,F,112909.397,A,2234.4669,N,11354.3287,E,0.11,321.53,"));

        verifyPosition(decoder, text(
            "imei:353451049926460,tracker,1208042043,123456 99008026,F,124336.000,A,3509.8668,N,03322.7636,E,0.00,,"));

        // SOS alarm
        verifyPosition(decoder, text(
            "imei:359586015829802,help me,0809231429,13554900601,F,062947.294,A,2234.4026,N,11354.3277,E,0.00,"));

        // Low battery alarm
        verifyPosition(decoder, text(
            "imei:359586015829802,low battery,0809231429,13554900601,F,062947.294,A,2234.4026,N,11354.3277,E,0.00,"));

        // Geo-fence alarm
        verifyPosition(decoder, text(
            "imei:359586015829802,stockade,0809231429,13554900601,F,062947.294,A,2234.4026,N,11354.3277,E,0.00,"));

        // Move alarm
        verifyPosition(decoder, text(
            "imei:359586015829802,move,0809231429,13554900601,F,062947.294,A,2234.4026,N,11354.3277,E,0.00,"));

        // Over speed alarm
        verifyPosition(decoder, text(
            "imei:359586015829802,speed,0809231429,13554900601,F,062947.294,A,2234.4026,N,11354.3277,E,0.00,"));

        verifyPosition(decoder, text(
            "imei:863070010423167,tracker,1211051840,,F,104000.000,A,2220.6483,N,11407.6377,,0,0,"));

        verifyPosition(decoder, text(
            "imei:863070010423167,tracker,1211051951,63360926,F,115123.000,A,2220.6322,N,11407.5313,E,0.00,,"));

        verifyPosition(decoder, text(
            "imei:863070010423167,tracker,1211060621,,F,062152.000,A,2220.6914,N,11407.5506,E,15.85,347.84,"));

        verifyPosition(decoder, text(
            "imei:863070012698733,tracker,1303092334,,F,193427.000,A,5139.0369,N,03907.2791,E,0.00,,"));

        verifyPosition(decoder, text(
            "imei:869039001186913,tracker,130925065533,0,F,065533.000,A,5604.11015,N,9232.12238,E,0.0,,329.0,"));

        verifyPosition(decoder, text(
            "imei:359710041641581,acc alarm,1402231159,,F,065907.000,A,2456.2591,N,06708.8335,E,7.53,76.10,,1,0,0.03%,,"));

        verifyPosition(decoder, text(
            "imei:359710041641581,acc alarm,1402231159,,F,065907.000,A,2456.2591,N,06708.8335,E,7.53,76.10,,1,0,0.03%,,"));

        verifyPosition(decoder, text(
            "imei:313009071131684,tracker,1403211928,,F,112817.000,A,0610.1133,N,00116.5840,E,0.00,,,0,0,0.0,0.0,"));

        verifyPosition(decoder, text(
            "imei:866989771979791,tracker,140527055653,,F,215653.00,A,5050.33113,N,00336.98783,E,0.066,0"));

        verifyPosition(decoder, text(
            "imei:353552045375005,tracker,150401165832,61.0,F,31.0,A,1050.73696,N,10636.49489,E,8.0,,22.0,"));

        verifyPosition(decoder, text(
            "imei:353552045403597,tracker,150420050648,53.0,F,0.0,A,N,5306.64155,E,00700.77848,0.0,,1.0,;"));

        verifyPosition(decoder, text(
            "imei:353552045403597,tracker,150420051153,53.0,F,0.0,A,5306.64155,N,00700.77848,E,0.0,,1.0,;"));

        verifyPosition(decoder, text(
            "imei:359710047424644,tracker,150506224036,,F,154037.000,A,0335.2785,N,09841.1543,E,3.03,337.54,,0,0,45.16%,,;"));

        verifyPosition(decoder, text(
            "imei:865328023776874,acc off,150619152221,,F,072218.000,A,5439.8489,N,02518.5945,E,0.00,,,1,1,0.0,0.0,23.0,;"));

    }

}
| apache-2.0 |
Org-Moli/MoliBackground | wk-app/wk-web/src/main/java/cn/wizzer/app/wx/modules/services/WxReplyTxtService.java | 224 | package cn.wizzer.app.wx.modules.services;
import cn.wizzer.framework.base.service.BaseService;
import cn.wizzer.app.wx.modules.models.Wx_reply_txt;
/**
 * Service-layer interface for {@code Wx_reply_txt} entities.
 * All CRUD behaviour is inherited from {@link BaseService}; no additional
 * operations are declared here.
 */
public interface WxReplyTxtService extends BaseService<Wx_reply_txt>{
}
| apache-2.0 |
ClarenceAu/log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/pattern/NameAbbreviator.java | 9346 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache license, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the license for the specific language governing permissions and
* limitations under the license.
*/
package org.apache.logging.log4j.core.pattern;
import java.util.ArrayList;
import java.util.List;
/**
 * NameAbbreviator generates abbreviated logger and class names.
 * <p>
 * Instances are obtained via {@link #getAbbreviator(String)}; the pattern
 * syntax is the one used by {@code %logger{...}} / {@code %class{...}}.
 */
public abstract class NameAbbreviator {
    /**
     * Default (no abbreviation) abbreviator.
     */
    private static final NameAbbreviator DEFAULT = new NOPAbbreviator();

    /**
     * Gets an abbreviator.
     * <p/>
     * For example, "%logger{2}" will output only 2 elements of the logger name,
     * "%logger{1.}" will output only the first character of the non-final elements in the name,
     * "%logger(1~.2~} will output the first character of the first element, two characters of
     * the second and subsequent elements and will use a tilde to indicate abbreviated characters.
     *
     * @param pattern abbreviation pattern.
     * @return abbreviator, will not be null.
     */
    public static NameAbbreviator getAbbreviator(final String pattern) {
        if (!pattern.isEmpty()) {
            // if pattern is just spaces and numbers then
            // use MaxElementAbbreviator
            final String trimmed = pattern.trim();
            if (trimmed.isEmpty()) {
                return DEFAULT;
            }
            int i = 0;
            while (i < trimmed.length() && trimmed.charAt(i) >= '0'
                && trimmed.charAt(i) <= '9') {
                i++;
            }
            //
            // if all blanks and digits
            //
            if (i == trimmed.length()) {
                return new MaxElementAbbreviator(Integer.parseInt(trimmed));
            }
            // Otherwise parse one fragment per dot-separated pattern element:
            // an optional element length ('*' = unlimited, single digit = count)
            // followed by an optional ellipsis character ('.' means none).
            final ArrayList<PatternAbbreviatorFragment> fragments = new ArrayList<PatternAbbreviatorFragment>(5);
            char ellipsis;
            int charCount;
            int pos = 0;
            while (pos < trimmed.length() && pos >= 0) {
                int ellipsisPos = pos;
                if (trimmed.charAt(pos) == '*') {
                    charCount = Integer.MAX_VALUE;
                    ellipsisPos++;
                } else if (trimmed.charAt(pos) >= '0' && trimmed.charAt(pos) <= '9') {
                    charCount = trimmed.charAt(pos) - '0';
                    ellipsisPos++;
                } else {
                    charCount = 0;
                }
                ellipsis = '\0';
                if (ellipsisPos < trimmed.length()) {
                    ellipsis = trimmed.charAt(ellipsisPos);
                    if (ellipsis == '.') {
                        ellipsis = '\0';
                    }
                }
                fragments.add(new PatternAbbreviatorFragment(charCount, ellipsis));
                pos = trimmed.indexOf('.', pos);
                if (pos == -1) {
                    break;
                }
                pos++;
            }
            return new PatternAbbreviator(fragments);
        }
        //
        // no matching abbreviation, return defaultAbbreviator
        //
        return DEFAULT;
    }

    /**
     * Gets default abbreviator.
     *
     * @return default abbreviator.
     */
    public static NameAbbreviator getDefaultAbbreviator() {
        return DEFAULT;
    }

    /**
     * Abbreviates a name in a String.
     *
     * @param buf buffer, may not be null.
     * @return The abbreviated String.
     */
    public abstract String abbreviate(final String buf);

    /**
     * Abbreviator that simply returns the full name unchanged.
     */
    private static class NOPAbbreviator extends NameAbbreviator {

        public NOPAbbreviator() {
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public String abbreviate(final String buf) {
            return buf;
        }
    }

    /**
     * Abbreviator that drops starting path elements, keeping only the last
     * {@code count} dot-separated elements of the name.
     */
    private static class MaxElementAbbreviator extends NameAbbreviator {
        /**
         * Maximum number of path elements to output.
         */
        private final int count;

        /**
         * Create new instance.
         *
         * @param count maximum number of path elements to output; values below 1 are clamped to 1.
         */
        public MaxElementAbbreviator(final int count) {
            this.count = count < 1 ? 1 : count;
        }

        /**
         * Abbreviate name.
         *
         * @param buf The String to abbreviate.
         * @return the abbreviated String.
         */
        @Override
        public String abbreviate(final String buf) {
            // We subtract 1 from 'len' when assigning to 'end' to avoid out of
            // bounds exception in return r.substring(end+1, len). This can happen if
            // precision is 1 and the category name ends with a dot.
            int end = buf.length() - 1;
            for (int i = count; i > 0; i--) {
                end = buf.lastIndexOf('.', end - 1);
                if (end == -1) {
                    // Fewer elements than requested: return the name unchanged.
                    return buf;
                }
            }
            return buf.substring(end + 1);
        }
    }

    /**
     * Fragment of a pattern abbreviator: how to shorten one dot-separated
     * element of the name.
     */
    private static class PatternAbbreviatorFragment {
        /**
         * Count of initial characters of element to output.
         */
        private final int charCount;

        /**
         * Character used to represent dropped characters.
         * '\0' indicates no representation of dropped characters.
         */
        private final char ellipsis;

        /**
         * Creates a PatternAbbreviatorFragment.
         *
         * @param charCount number of initial characters to preserve.
         * @param ellipsis  character to represent elimination of characters,
         *                  '\0' if no ellipsis is desired.
         */
        public PatternAbbreviatorFragment(
            final int charCount, final char ellipsis) {
            this.charCount = charCount;
            this.ellipsis = ellipsis;
        }

        /**
         * Abbreviate element of name in place.
         *
         * @param buf      buffer containing the (partially abbreviated) name.
         * @param startPos starting index of name element.
         * @return starting index of next element, or -1 when no further dot exists.
         */
        public int abbreviate(final StringBuilder buf, final int startPos) {
            // Perf fix: search the StringBuilder directly instead of the original
            // buf.toString().indexOf(...), which copied the whole buffer to a new
            // String on every element.
            int nextDot = buf.indexOf(".", startPos);
            if (nextDot != -1) {
                if (nextDot - startPos > charCount) {
                    buf.delete(startPos + charCount, nextDot);
                    nextDot = startPos + charCount;
                    if (ellipsis != '\0') {
                        buf.insert(nextDot, ellipsis);
                        nextDot++;
                    }
                }
                nextDot++;
            }
            return nextDot;
        }
    }

    /**
     * Pattern abbreviator: applies one fragment per name element, reusing the
     * last fragment for all remaining elements.
     */
    private static class PatternAbbreviator extends NameAbbreviator {
        /**
         * Element abbreviation patterns.
         */
        private final PatternAbbreviatorFragment[] fragments;

        /**
         * Create PatternAbbreviator.
         *
         * @param fragments element abbreviation patterns; must not be empty.
         */
        public PatternAbbreviator(final List<PatternAbbreviatorFragment> fragments) {
            if (fragments.isEmpty()) {
                throw new IllegalArgumentException(
                    "fragments must have at least one element");
            }
            this.fragments = new PatternAbbreviatorFragment[fragments.size()];
            fragments.toArray(this.fragments);
        }

        /**
         * Abbreviates name.
         *
         * @param buf the name to abbreviate.
         * @return the abbreviated name.
         */
        @Override
        public String abbreviate(final String buf) {
            //
            // all non-terminal patterns are executed once
            //
            int pos = 0;
            final StringBuilder sb = new StringBuilder(buf);
            for (int i = 0; i < fragments.length - 1 && pos < buf.length();
                 i++) {
                pos = fragments[i].abbreviate(sb, pos);
            }
            //
            // last pattern is executed repeatedly for the remaining elements
            //
            final PatternAbbreviatorFragment terminalFragment = fragments[fragments.length - 1];
            while (pos < buf.length() && pos >= 0) {
                pos = terminalFragment.abbreviate(sb, pos);
            }
            return sb.toString();
        }
    }
}
| apache-2.0 |
mikelalcon/bazel | src/test/java/com/google/devtools/build/lib/analysis/AnalysisUtilsTest.java | 1778 | // Copyright 2015 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.analysis;
import static com.google.common.truth.Truth.assertThat;
import static com.google.devtools.build.lib.analysis.AnalysisUtils.checkProvider;
import static org.junit.Assert.fail;
import com.google.auto.value.AutoValue;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/**
 * Tests for AnalysisUtils.checkProvider(): it must accept the hand-written
 * @AutoValue-annotated provider class but reject the AutoValue-generated
 * subclass. Left byte-identical: the test depends on the exact generated
 * class name (AutoValue_AnalysisUtilsTest_AutoValuedClass) and JUnit's
 * reflective discovery.
 */
@RunWith(JUnit4.class)
public class AnalysisUtilsTest {

    @Test
    public void checkProviderSucceedsOnClassAnnotatedWithAutoValue() {
        // The annotated abstract class is the valid way to reference a provider.
        checkProvider(AutoValuedClass.class);
    }

    @Test
    public void checkProviderFailsOnClassGeneratredByAutoValue() {
        try {
            // The generated concrete subclass must be rejected...
            checkProvider(AutoValue_AnalysisUtilsTest_AutoValuedClass.class);
            fail("Expected IllegalArgumentException, but nothing was thrown.");
        } catch (IllegalArgumentException e) {
            // ...with a message that points at the AutoValue generation.
            assertThat(e.getMessage()).contains("generated by @AutoValue");
        }
    }

    // Note: this has to be defined outside of checkProviderFailsOnClassGeneratredByAutoValue() so it
    // can be static, which is required by @AutoValue.
    @AutoValue
    abstract static class AutoValuedClass implements TransitiveInfoProvider {
        abstract int foo();
    }
}
| apache-2.0 |
jaehong-kim/pinpoint | grpc/src/main/java/io/grpc/netty/LogIdServerListenerDelegator.java | 1340 | package io.grpc.netty;
import io.grpc.InternalLogId;
import io.grpc.internal.ServerListener;
import io.grpc.internal.ServerTransport;
import io.grpc.internal.ServerTransportListener;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * {@link ServerListenerDelegator} that wraps a {@link ServerListener} so every
 * transport created through it gets the transport's log id attached via
 * {@link LogIdAttachListener}.
 */
public class LogIdServerListenerDelegator implements ServerListenerDelegator {

    // Fix: SLF4J convention — one private static final logger per class with a
    // class literal, instead of a package-private per-instance field.
    private static final Logger logger = LoggerFactory.getLogger(LogIdServerListenerDelegator.class);

    @Override
    public ServerListener wrapServerListener(final ServerListener serverListener) {
        logger.info("ServerListener serverListener:{}", serverListener);
        final ServerListener delegate = new ServerListener() {
            @Override
            public ServerTransportListener transportCreated(ServerTransport transport) {
                final InternalLogId logId = transport.getLogId();
                if (logger.isDebugEnabled()) {
                    logger.debug("transportCreated:{} {}", transport, logId);
                }
                // Delegate first, then attach the transport's log id to the
                // resulting listener.
                final ServerTransportListener serverTransportListener = serverListener.transportCreated(transport);
                return new LogIdAttachListener(serverTransportListener, logId.getId());
            }

            @Override
            public void serverShutdown() {
                serverListener.serverShutdown();
            }
        };
        return delegate;
    }
}
| apache-2.0 |
pdalbora/gosu-lang | gosu-test/src/main/java/gw/lang/annotations/MyAnnos.java | 209 | package gw.lang.annotations;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
/**
 * Holds multiple {@link MyAnno} values; retained at runtime so the values can
 * be read reflectively.
 * NOTE(review): presumably this is the containing annotation type for a
 * repeatable {@code MyAnno} — confirm against the MyAnno declaration.
 */
@Retention(RetentionPolicy.RUNTIME)
public @interface MyAnnos {
    MyAnno[] value();
}
| apache-2.0 |
antz29/closure-compiler | test/com/google/javascript/jscomp/CollapseAnonymousFunctionsTest.java | 3800 | /*
* Copyright 2008 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
/**
 * Tests for {@link CollapseAnonymousFunctions}
 *
 * <p>Each case feeds a JS source string through the pass: {@code test(src, expected)}
 * asserts the transformation, {@code testSame(src)} asserts no change. The string
 * literals are golden data and left byte-identical.
 */
public class CollapseAnonymousFunctionsTest extends CompilerTestCase {

    public CollapseAnonymousFunctionsTest() {
        // Normalization gives the pass canonical var declarations to work on.
        this.enableNormalize();
    }

    @Override
    protected CompilerPass getProcessor(Compiler compiler) {
        return new CollapseAnonymousFunctions(compiler);
    }

    // var f = function(){} collapses to a function declaration at top level...
    public void testGlobalScope() {
        test("var f = function(){}", "function f(){}");
    }

    // ...and likewise inside a function body.
    public void testLocalScope1() {
        test("function f(){ var x = function(){}; x() }",
             "function f(){ function x(){} x() }");
    }

    public void testLocalScope2() {
        test("function f(){ var x = function(){}; return x }",
             "function f(){ function x(){} return x }");
    }

    // A var that is not directly under a script/block (e.g. inside "if (x) var f = ...")
    // must not be collapsed, since function declarations hoist differently.
    public void testVarNotImmediatelyBelowScriptOrBlock1() {
        testSame("if (x) var f = function(){}");
    }

    public void testVarNotImmediatelyBelowScriptOrBlock2() {
        testSame("var x = 1;" +
                 "if (x == 1) {" +
                 " var f = function () { alert('b')}" +
                 "} else {" +
                 " f = function() { alert('c')}" +
                 "}" +
                 "f();");
    }

    public void testVarNotImmediatelyBelowScriptOrBlock3() {
        testSame("var x = 1; if (x) {var f = function(){return x}; f(); x--;}");
    }

    // Only the anonymous-function var collapses; plain aliases are untouched.
    public void testMultipleVar() {
        test("var f = function(){}; var g = f", "function f(){} var g = f");
    }

    public void testMultipleVar2() {
        test("var f = function(){}; var g = f; var h = function(){}",
             "function f(){}var g = f;function h(){}");
    }

    // Collapsing applies recursively to nested scopes.
    public void testBothScopes() {
        test("var x = function() { var y = function(){} }",
             "function x() { function y(){} }");
    }

    public void testLocalScopeOnly1() {
        test("if (x) var f = function(){ var g = function(){} }",
             "if (x) var f = function(){ function g(){} }");
    }

    public void testLocalScopeOnly2() {
        test("if (x) var f = function(){ var g = function(){} };",
             "if (x) var f = function(){ function g(){} }");
    }

    public void testReturn() {
        test("var f = function(x){return 2*x}; var g = f(2);",
             "function f(x){return 2*x} var g = f(2)");
    }

    public void testAlert() {
        test("var x = 1; var f = function(){alert(x)};",
             "var x = 1; function f(){alert(x)}");
    }

    // Named function expressions that call themselves by their internal name
    // must not be collapsed (the internal name's scope would change).
    public void testRecursiveInternal1() {
        testSame("var f = function foo() { foo() }");
    }

    public void testRecursiveInternal2() {
        testSame("var f = function foo() { function g(){foo()} g() }");
    }

    // But recursion through the external var name is safe to collapse.
    public void testRecursiveExternal1() {
        test("var f = function foo() { f() }",
             "function f() { f() }");
    }

    public void testRecursiveExternal2() {
        test("var f = function foo() { function g(){f()} g() }",
             "function f() { function g(){f()} g() }");
    }

    public void testConstantFunction1() {
        test("var FOO = function(){};FOO()",
             "function FOO(){}FOO()");
    }

    public void testInnerFunction1() {
        test(
            "function f() { " +
            " var x = 3; var y = function() { return 4; }; return x + y();" +
            "}",
            "function f() { " +
            " function y() { return 4; } var x = 3; return x + y();" +
            "}");
    }
}
| apache-2.0 |
DiceHoldingsInc/orientdb | tools/src/main/java/com/orientechnologies/orient/console/OConsoleDatabaseListener.java | 2096 | /*
*
* * Copyright 2014 Orient Technologies LTD (info(at)orientechnologies.com)
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
* * For more information: http://www.orientechnologies.com
*
*/
package com.orientechnologies.orient.console;
import com.orientechnologies.orient.core.db.ODatabase;
import com.orientechnologies.orient.core.db.ODatabaseListener;
/**
 * Database listener used by the console application. All lifecycle callbacks
 * are intentionally no-ops; the listener exists to prompt the user whenever a
 * corruption repair is about to run.
 */
public class OConsoleDatabaseListener implements ODatabaseListener {
  OConsoleDatabaseApp console;

  public OConsoleDatabaseListener(OConsoleDatabaseApp console) {
    this.console = console;
  }

  // Lifecycle events: nothing to do for the console.
  public void onCreate(ODatabase iDatabase) {
  }

  public void onDelete(ODatabase iDatabase) {
  }

  public void onOpen(ODatabase iDatabase) {
  }

  // Transaction events: nothing to do for the console.
  public void onBeforeTxBegin(ODatabase iDatabase) {
  }

  public void onBeforeTxRollback(ODatabase iDatabase) {
  }

  public void onAfterTxRollback(ODatabase iDatabase) {
  }

  public void onBeforeTxCommit(ODatabase iDatabase) {
  }

  public void onAfterTxCommit(ODatabase iDatabase) {
  }

  public void onClose(ODatabase iDatabase) {
  }

  /**
   * Asks the user whether the auto-repair action should run.
   *
   * @return true when the user answers with an empty line (default), "y" or
   *         "yes" in any case; false otherwise.
   */
  public boolean onCorruptionRepairDatabase(ODatabase iDatabase, final String iProblem, String iWhatWillbeFixed) {
    final String prompt = "\nDatabase seems corrupted:\n> " + iProblem
        + "\nAuto-repair will execute this action:\n> " + iWhatWillbeFixed
        + "\n\nDo you want to repair it (Y/n)? ";
    final String answer = console.ask(prompt);
    if (answer.isEmpty())
      return true;
    return "Y".equalsIgnoreCase(answer) || "Yes".equalsIgnoreCase(answer);
  }
}
| apache-2.0 |
MaTriXy/pocketknife | pocketknife-compiler/src/main/java/pocketknife/internal/codegen/builder/FragmentMethodBinding.java | 3411 | package pocketknife.internal.codegen.builder;
import com.squareup.javapoet.ClassName;
import com.squareup.javapoet.MethodSpec;
import org.apache.commons.lang3.StringUtils;
import pocketknife.internal.codegen.BundleFieldBinding;
import pocketknife.internal.codegen.FieldBinding;
import pocketknife.internal.codegen.KeySpec;
import pocketknife.internal.codegen.MethodBinding;
import pocketknife.internal.codegen.TypeUtil;
import javax.lang.model.type.TypeMirror;
import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import static javax.lang.model.element.Modifier.PUBLIC;
/**
 * Generates a builder method that constructs a Fragment, packs the method
 * parameters into an argument Bundle, and attaches the bundle via
 * {@code setArguments} before returning the fragment.
 */
public class FragmentMethodBinding extends MethodBinding {

    private static final String RETURN_VAR_NAME_ROOT = "fragment";
    private static final String ARGS_VAR_NAME_ROOT = "args";

    private final String name;
    private final TypeMirror returnType;
    private final List<BundleFieldBinding> fields = new ArrayList<BundleFieldBinding>();

    public FragmentMethodBinding(String name, TypeMirror returnType) {
        this.name = name;
        this.returnType = returnType;
    }

    /**
     * Registers a bundle field for this method.
     *
     * @throws IllegalStateException if a field with the same identity was
     *         already added (duplicate argument name).
     */
    public void addField(BundleFieldBinding fieldBinding) {
        if (fields.contains(fieldBinding)) {
            throw new IllegalStateException("Cannot have multiple arguments named: " + fieldBinding.getName());
        }
        fields.add(fieldBinding);
    }

    @Override
    public List<? extends FieldBinding> getFields() {
        return fields;
    }

    /** Collects the bundle keys of all registered fields, preserving order. */
    @Override
    public Set<KeySpec> getKeys() {
        Set<KeySpec> result = new LinkedHashSet<KeySpec>();
        for (BundleFieldBinding binding : fields) {
            result.add(binding.getKey());
        }
        return result;
    }

    /** Emits the generated builder method as a JavaPoet {@link MethodSpec}. */
    @Override
    public MethodSpec generateMethodSpec(TypeUtil typeUtil) {
        String fragmentVar = getReturnVarName(RETURN_VAR_NAME_ROOT);
        String argsVar = getReturnVarName(ARGS_VAR_NAME_ROOT);
        MethodSpec.Builder builder = MethodSpec.methodBuilder(name)
                .addAnnotation(Override.class)
                .addModifiers(PUBLIC)
                .returns(ClassName.get(returnType))
                .addStatement("$T $N = new $T()", ClassName.get(returnType), fragmentVar, ClassName.get(returnType));
        boolean hasFields = !fields.isEmpty();
        if (hasFields) {
            // Only allocate the argument bundle when there is something to put in it.
            builder.addStatement("$T $N = new $T()", ClassName.get(typeUtil.bundleType), argsVar, ClassName.get(typeUtil.bundleType));
        }
        for (BundleFieldBinding field : fields) {
            builder.addParameter(ClassName.get(field.getType()), field.getName());
            KeySpec key = field.getKey();
            // A key without a name is emitted as a string literal ($S); a named
            // key references the generated constant ($N).
            boolean literalKey = StringUtils.isBlank(key.getName());
            String format = "$N.put$L(" + (literalKey ? "$S" : "$N") + ", $N)";
            String keyArg = literalKey ? key.getValue() : key.getName();
            builder.addStatement(format, argsVar, field.getBundleType(), keyArg, field.getName());
        }
        if (hasFields) {
            builder.addStatement("$N.setArguments($N)", fragmentVar, argsVar);
        }
        builder.addStatement("return $N", fragmentVar);
        return builder.build();
    }
}
| apache-2.0 |
IllusionRom-deprecated/android_platform_tools_idea | platform/platform-impl/src/com/intellij/openapi/wm/impl/status/EncodingPanel.java | 10287 | /*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.wm.impl.status;
import com.intellij.icons.AllIcons;
import com.intellij.ide.DataManager;
import com.intellij.openapi.actionSystem.CommonDataKeys;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.actionSystem.PlatformDataKeys;
import com.intellij.openapi.actionSystem.impl.SimpleDataContext;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.EditorFactory;
import com.intellij.openapi.editor.event.DocumentAdapter;
import com.intellij.openapi.editor.event.DocumentEvent;
import com.intellij.openapi.fileEditor.*;
import com.intellij.openapi.fileEditor.impl.LoadTextUtil;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.popup.ListPopup;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileAdapter;
import com.intellij.openapi.vfs.VirtualFileManager;
import com.intellij.openapi.vfs.VirtualFilePropertyEvent;
import com.intellij.openapi.vfs.encoding.ChangeFileEncodingAction;
import com.intellij.openapi.vfs.encoding.EncodingManager;
import com.intellij.openapi.vfs.encoding.EncodingManagerImpl;
import com.intellij.openapi.vfs.encoding.EncodingUtil;
import com.intellij.openapi.vfs.impl.BulkVirtualFileListenerAdapter;
import com.intellij.openapi.wm.CustomStatusBarWidget;
import com.intellij.openapi.wm.StatusBar;
import com.intellij.openapi.wm.StatusBarWidget;
import com.intellij.ui.ClickListener;
import com.intellij.ui.awt.RelativePoint;
import com.intellij.util.Alarm;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
import java.awt.event.MouseEvent;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.lang.ref.Reference;
import java.lang.ref.WeakReference;
import java.nio.charset.Charset;
/**
 * Status bar widget that displays the encoding of the file open in the selected
 * editor. When the encoding may be changed, clicking the widget opens the
 * "change file encoding" popup.
 *
 * @author cdr
 */
public class EncodingPanel extends EditorBasedWidget implements StatusBarWidget.Multiframe, CustomStatusBarWidget {
  private final TextPanel myComponent;
  // Whether the change-encoding action applies to the current file; also
  // controls painting of the popup arrows and whether clicks open the popup.
  private boolean actionEnabled;
  // Debounce timer for widget refreshes; see update().
  private final Alarm update;
  private volatile Reference<Editor> myEditor = new WeakReference<Editor>(null); // store editor here to avoid expensive and EDT-only getSelectedEditor() retrievals

  public EncodingPanel(@NotNull final Project project) {
    super(project);
    update = new Alarm(this);
    myComponent = new TextPanel() {
      @Override
      protected void paintComponent(@NotNull final Graphics g) {
        super.paintComponent(g);
        // Paint the up/down arrows at the right edge when the widget is clickable.
        if (actionEnabled && getText() != null) {
          final Rectangle r = getBounds();
          final Insets insets = getInsets();
          AllIcons.Ide.Statusbar_arrows.paintIcon(this, g, r.width - insets.right - AllIcons.Ide.Statusbar_arrows.getIconWidth() - 2,
                                                  r.height / 2 - AllIcons.Ide.Statusbar_arrows.getIconHeight() / 2);
        }
      }
    };
    new ClickListener() {
      @Override
      public boolean onClick(MouseEvent e, int clickCount) {
        // Refresh state first so the popup reflects the current file.
        update();
        showPopup(e);
        return true;
      }
    }.installOn(myComponent);
    myComponent.setBorder(WidgetBorder.INSTANCE);
  }

  @Nullable("returns null if charset set cannot be determined from content")
  private static Charset cachedCharsetFromContent(final VirtualFile virtualFile) {
    if (virtualFile == null) return null;
    final Document document = FileDocumentManager.getInstance().getDocument(virtualFile);
    if (document == null) return null;
    return EncodingManager.getInstance().getCachedCharsetFromContent(document);
  }

  @Override
  public void selectionChanged(@NotNull FileEditorManagerEvent event) {
    if (ApplicationManager.getApplication().isUnitTestMode()) return;
    VirtualFile newFile = event.getNewFile();
    fileChanged(newFile);
  }

  // Remembers the text editor of the newly selected file and refreshes the widget.
  private void fileChanged(VirtualFile newFile) {
    FileEditor fileEditor = newFile == null ? null : FileEditorManager.getInstance(getProject()).getSelectedEditor(newFile);
    Editor editor = fileEditor instanceof TextEditor ? ((TextEditor)fileEditor).getEditor() : null;
    myEditor = new WeakReference<Editor>(editor);
    update();
  }

  @Override
  public void fileOpened(@NotNull FileEditorManager source, @NotNull VirtualFile file) {
    fileChanged(file);
  }

  @Override
  public StatusBarWidget copy() {
    return new EncodingPanel(getProject());
  }

  @Override
  @NotNull
  public String ID() {
    return "Encoding";
  }

  @Override
  public WidgetPresentation getPresentation(@NotNull PlatformType type) {
    return null;
  }

  @Override
  public void install(@NotNull StatusBar statusBar) {
    super.install(statusBar);
    // should update to reflect encoding-from-content
    EncodingManager.getInstance().addPropertyChangeListener(new PropertyChangeListener() {
      @Override
      public void propertyChange(PropertyChangeEvent evt) {
        if (evt.getPropertyName().equals(EncodingManagerImpl.PROP_CACHED_ENCODING_CHANGED)) {
          Document document = evt.getSource() instanceof Document ? (Document)evt.getSource() : null;
          updateForDocument(document);
        }
      }
    }, this);
    // Refresh when a file's encoding property changes in the virtual file system.
    ApplicationManager.getApplication().getMessageBus().connect(this).subscribe(VirtualFileManager.VFS_CHANGES, new BulkVirtualFileListenerAdapter(new VirtualFileAdapter() {
      @Override
      public void propertyChanged(VirtualFilePropertyEvent event) {
        if (VirtualFile.PROP_ENCODING.equals(event.getPropertyName())) {
          updateForFile(event.getFile());
        }
      }
    }));
    // Refresh on edits, since content changes can affect the detected encoding.
    EditorFactory.getInstance().getEventMulticaster().addDocumentListener(new DocumentAdapter() {
      @Override
      public void documentChanged(DocumentEvent e) {
        Document document = e.getDocument();
        updateForDocument(document);
      }
    }, this);
  }

  private void updateForDocument(@Nullable("null means update anyway") Document document) {
    Editor selectedEditor = myEditor.get();
    // Ignore events for documents other than the one currently shown.
    if (document != null && (selectedEditor == null || selectedEditor.getDocument() != document)) return;
    update();
  }

  private void updateForFile(@Nullable("null means update anyway") VirtualFile file) {
    if (file == null) {
      update();
    }
    else {
      updateForDocument(FileDocumentManager.getInstance().getCachedDocument(file));
    }
  }

  // Opens the change-encoding popup above the widget, if the action is enabled.
  private void showPopup(@NotNull MouseEvent e) {
    if (!actionEnabled) {
      return;
    }
    DataContext dataContext = getContext();
    ListPopup popup = new ChangeFileEncodingAction().createPopup(dataContext);
    if (popup != null) {
      Dimension dimension = popup.getContent().getPreferredSize();
      // Negative y positions the popup above the status bar component.
      Point at = new Point(0, -dimension.height);
      popup.show(new RelativePoint(e.getComponent(), at));
      Disposer.register(this, popup); // destroy popup on unexpected project close
    }
  }

  // Builds the data context (file, project, focus component) for the popup action.
  @NotNull
  private DataContext getContext() {
    Editor editor = getEditor();
    DataContext parent = DataManager.getInstance().getDataContext((Component)myStatusBar);
    return SimpleDataContext.getSimpleContext(CommonDataKeys.VIRTUAL_FILE.getName(), getSelectedFile(),
           SimpleDataContext.getSimpleContext(CommonDataKeys.PROJECT.getName(), getProject(),
           SimpleDataContext.getSimpleContext(PlatformDataKeys.CONTEXT_COMPONENT.getName(), editor == null ? null : editor.getComponent(), parent)
    ));
  }

  // Schedules a debounced (200 ms) refresh of the widget's text, tooltip,
  // color and enabled state.
  private void update() {
    if (update.isDisposed()) return;
    update.cancelAllRequests();
    update.addRequest(new Runnable() {
      @Override
      public void run() {
        if (isDisposed()) return;
        VirtualFile file = getSelectedFile();
        actionEnabled = false;
        String charsetName = null;
        Pair<Charset, String> check = null;
        if (file != null) {
          // check.second, when non-null, is the reason the action is disabled.
          check = EncodingUtil.checkSomeActionEnabled(file);
          Charset charset = null;
          // Prefer the charset detected from the file content, when available.
          if (LoadTextUtil.wasCharsetDetectedFromBytes(file) != null) {
            charset = cachedCharsetFromContent(file);
          }
          if (charset == null) {
            charset = file.getCharset();
          }
          actionEnabled = check == null || check.second == null;
          if (!actionEnabled) {
            charset = check.first;
          }
          if (charset != null) {
            charsetName = charset.displayName();
          }
        }
        if (charsetName == null) {
          charsetName = "n/a";
        }
        String toolTipText;
        if (actionEnabled) {
          toolTipText = String.format(
            "File Encoding%n%s", charsetName);
          myComponent.setForeground(UIUtil.getActiveTextColor());
          myComponent.setTextAlignment(Component.LEFT_ALIGNMENT);
        }
        else {
          String failReason = check == null ? "" : check.second;
          toolTipText = String.format("File encoding is disabled%n%s",
                                      failReason);
          myComponent.setForeground(UIUtil.getInactiveTextColor());
          myComponent.setTextAlignment(Component.CENTER_ALIGNMENT);
        }
        myComponent.setToolTipText(toolTipText);
        myComponent.setText(charsetName);
        if (myStatusBar != null) {
          myStatusBar.updateWidget(ID());
        }
      }
    }, 200, ModalityState.any());
  }

  @Override
  public JComponent getComponent() {
    return myComponent;
  }
}
| apache-2.0 |
guusdk/Spark | plugins/fastpath/src/main/java/org/jivesoftware/fastpath/workspace/panes/InvitationPane.java | 13208 | /**
* Copyright (C) 2004-2011 Jive Software. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.fastpath.workspace.panes;
import java.awt.Color;
import java.awt.Font;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import javax.swing.JLabel;
import javax.swing.JPanel;
import org.jivesoftware.fastpath.FastpathPlugin;
import org.jivesoftware.fastpath.FpRes;
import org.jivesoftware.fastpath.resources.FastpathRes;
import org.jivesoftware.fastpath.workspace.Workpane.RoomState;
import org.jivesoftware.fastpath.workspace.assistants.RoomInformation;
import org.jivesoftware.fastpath.workspace.util.RequestUtils;
import org.jivesoftware.resource.SparkRes;
import org.jivesoftware.smack.SmackException;
import org.jivesoftware.smack.XMPPException;
import org.jivesoftware.smack.packet.Message;
import org.jivesoftware.smackx.muc.MultiUserChatManager;
import org.jivesoftware.smackx.xdata.form.FillableForm;
import org.jivesoftware.smackx.muc.Affiliate;
import org.jivesoftware.smackx.muc.MultiUserChat;
import org.jivesoftware.smackx.workgroup.MetaData;
import org.jivesoftware.spark.ChatManager;
import org.jivesoftware.spark.ChatNotFoundException;
import org.jivesoftware.spark.SparkManager;
import org.jivesoftware.spark.component.LinkLabel;
import org.jivesoftware.spark.component.RolloverButton;
import org.jivesoftware.spark.component.WrappedLabel;
import org.jivesoftware.spark.ui.ChatContainer;
import org.jivesoftware.spark.ui.conferences.ConferenceUtils;
import org.jivesoftware.spark.ui.rooms.GroupChatRoom;
import org.jivesoftware.spark.util.ResourceUtils;
import org.jivesoftware.spark.util.SwingWorker;
import org.jivesoftware.spark.util.log.Log;
import org.jxmpp.jid.EntityBareJid;
import org.jxmpp.jid.Jid;
import org.jxmpp.jid.util.JidUtil;
import org.jxmpp.util.XmppStringUtils;
/**
 * Pane shown inside a (temporarily stripped-down) group chat room when a
 * Fastpath invitation or transfer arrives. The user may view the request
 * metadata, accept (which restores the chat UI and joins the room) or decline
 * (which removes the room and sends a MUC decline to the inviter).
 */
public class InvitationPane {
    // Fastpath metadata extracted from the invitation message, if present.
    private Map<String, List<String>> metadata = null;
    private GroupChatRoom chatRoom;

    /**
     * Builds the invitation UI and wires it into a new or existing group chat room.
     *
     * @param request  the Fastpath request (provides transfer flag, username, session id)
     * @param room     the MUC room being offered
     * @param inviter  the JID that sent the invitation
     * @param reason   free-text reason shown to the user
     * @param password room password used when joining on accept
     * @param message  the raw invitation message, carrying optional metadata
     */
    public InvitationPane(final RequestUtils request, final EntityBareJid room, final EntityBareJid inviter, String reason, final String password, final Message message) {
        // Alert panel showing who invited, which room, and the stated reason.
        final JPanel transcriptAlert = new JPanel();
        transcriptAlert.setBackground(Color.white);
        transcriptAlert.setLayout(new GridBagLayout());
        JLabel userImage = new JLabel(FastpathRes.getImageIcon(FastpathRes.FASTPATH_IMAGE_16x16));
        userImage.setHorizontalAlignment(JLabel.LEFT);
        String title = FpRes.getString("title.fastpath.invitation");
        if (request.isTransfer()) {
            title = FpRes.getString("title.fastpath.transfer");
        }
        userImage.setText(title);
        transcriptAlert.add(userImage, new GridBagConstraints(0, 0, 4, 1, 1.0, 0.0, GridBagConstraints.WEST, GridBagConstraints.HORIZONTAL, new Insets(2, 2, 2, 2), 0, 0));
        userImage.setFont(new Font("Dialog", Font.BOLD, 12));
        final JLabel inviterLabel = new JLabel(FpRes.getString("from") + ":");
        inviterLabel.setFont(new Font("Dialog", Font.BOLD, 11));
        final WrappedLabel inviterValueLabel = new WrappedLabel();
        String nickname = SparkManager.getUserManager().getUserNicknameFromJID(inviter);
        inviterValueLabel.setText(nickname);
        transcriptAlert.add(inviterLabel, new GridBagConstraints(0, 1, 1, 1, 0.0, 0.0, GridBagConstraints.NORTHWEST, GridBagConstraints.NONE, new Insets(2, 2, 2, 2), 0, 0));
        transcriptAlert.add(inviterValueLabel, new GridBagConstraints(1, 1, 3, 1, 1.0, 0.0, GridBagConstraints.NORTHWEST, GridBagConstraints.HORIZONTAL, new Insets(2, 2, 2, 2), 0, 0));
        final JLabel nameLabel = new JLabel(FpRes.getString("room") + ":");
        nameLabel.setFont(new Font("Dialog", Font.BOLD, 11));
        final WrappedLabel valueLabel = new WrappedLabel();
        valueLabel.setText(room.toString());
        transcriptAlert.add(nameLabel, new GridBagConstraints(0, 2, 1, 1, 0.0, 0.0, GridBagConstraints.NORTHWEST, GridBagConstraints.NONE, new Insets(2, 2, 2, 2), 0, 0));
        transcriptAlert.add(valueLabel, new GridBagConstraints(1, 2, 3, 1, 1.0, 0.0, GridBagConstraints.NORTHWEST, GridBagConstraints.HORIZONTAL, new Insets(2, 2, 2, 2), 0, 0));
        final JLabel messageLabel = new JLabel(FpRes.getString("message") + ":");
        messageLabel.setFont(new Font("Dialog", Font.BOLD, 11));
        final WrappedLabel messageValueLabel = new WrappedLabel();
        messageValueLabel.setText(reason);
        transcriptAlert.add(messageLabel, new GridBagConstraints(0, 3, 1, 1, 0.0, 0.0, GridBagConstraints.NORTHWEST, GridBagConstraints.NONE, new Insets(2, 2, 2, 2), 0, 0));
        transcriptAlert.add(messageValueLabel, new GridBagConstraints(1, 3, 3, 1, 1.0, 0.0, GridBagConstraints.NORTHWEST, GridBagConstraints.HORIZONTAL, new Insets(2, 2, 2, 2), 0, 0));
        // Add accept button and reject button.
        final RolloverButton acceptButton = new RolloverButton("Accept", FastpathRes.getImageIcon(FastpathRes.CIRCLE_CHECK_IMAGE));
        final RolloverButton rejectButton = new RolloverButton("Decline", FastpathRes.getImageIcon(FastpathRes.SMALL_DELETE));
        ResourceUtils.resButton(acceptButton, FpRes.getString("button.accept"));
        ResourceUtils.resButton(rejectButton, FpRes.getString("button.reject"));
        LinkLabel infoButton = new LinkLabel(FpRes.getString("message.view.more.information"), null, Color.blue, Color.red);
        transcriptAlert.add(infoButton, new GridBagConstraints(0, 4, 1, 1, 0.0, 0.0, GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(2, 2, 2, 2), 0, 0));
        transcriptAlert.add(acceptButton, new GridBagConstraints(1, 4, 1, 1, 1.0, 0.0, GridBagConstraints.EAST, GridBagConstraints.NONE, new Insets(2, 2, 2, 2), 0, 0));
        transcriptAlert.add(rejectButton, new GridBagConstraints(2, 4, 1, 1, 0.0, 0.0, GridBagConstraints.EAST, GridBagConstraints.NONE, new Insets(2, 2, 2, 2), 0, 0));
        transcriptAlert.add(new JLabel(), new GridBagConstraints(2, 5, 1, 1, 0.0, 1.0, GridBagConstraints.SOUTH, GridBagConstraints.NONE, new Insets(2, 2, 2, 2), 0, 0));
        MetaData metaDataExt = message.getExtension(MetaData.ELEMENT_NAME, MetaData.NAMESPACE);
        if (metaDataExt != null) {
            metadata = metaDataExt.getMetaData();
        }
        infoButton.addMouseListener(new MouseAdapter() {
            public void mouseClicked(MouseEvent e) {
                RoomInformation roomInformation = new RoomInformation();
                if (metadata != null) {
                    roomInformation.showAllInformation(metadata);
                    roomInformation.showRoomInformation();
                }
            }
        });
        acceptButton.addActionListener(actionEvent -> {
            // Short delay before restoring the UI and joining the room.
            SwingWorker waiter = new SwingWorker() {
                public Object construct() {
                    try {
                        Thread.sleep(50);
                    }
                    catch (InterruptedException e) {
                        Log.error(e);
                    }
                    return true;
                }
                public void finished() {
                    String roomName = request.getUsername();
                    // Restore the chat-room UI that was hidden while the invitation
                    // was pending.
                    chatRoom.getSplitPane().getRightComponent().setVisible(true);
                    chatRoom.getBottomPanel().setVisible(true);
                    chatRoom.getScrollPaneForTranscriptWindow().setVisible(true);
                    chatRoom.getEditorBar().setVisible(true);
                    chatRoom.getChatInputEditor().setEnabled(true);
                    chatRoom.getToolBar().setVisible(true);
                    chatRoom.getVerticalSlipPane().setDividerLocation(0.8);
                    chatRoom.getSplitPane().setDividerLocation(0.8);
                    transcriptAlert.setVisible(false);
                    try {
                        // Re-title the tab and frame with the requester's name.
                        chatRoom.setTabTitle(roomName);
                        chatRoom.getConferenceRoomInfo().setNicknameChangeAllowed(false);
                        chatRoom.getToolBar().setVisible(true);
                        chatRoom.getEditorBar().setVisible(true);
                        chatRoom.getChatInputEditor().setEnabled(true);
                        ChatContainer chatContainer = SparkManager.getChatManager().getChatContainer();
                        chatContainer.setChatRoomTitle(chatRoom, roomName);
                        if (chatContainer.getActiveChatRoom() == chatRoom) {
                            chatContainer.getChatFrame().setTitle(roomName);
                        }
                    }
                    catch (Exception e) {
                        Log.error(e);
                    }
                    ConferenceUtils.enterRoomOnSameThread(roomName, room, password);
                    removeOwner(chatRoom.getMultiUserChat());
                    FastpathPlugin.getLitWorkspace().checkForDecoration(chatRoom, request.getSessionID());
                }
            };
            waiter.start();
        });
        // Add to Chat window
        ChatManager chatManager = SparkManager.getChatManager();
        try {
            chatRoom = chatManager.getGroupChat(room);
        }
        catch (ChatNotFoundException e) {
            MultiUserChat chat = MultiUserChatManager.getInstanceFor( SparkManager.getConnection() ).getMultiUserChat( room );
            chatRoom = new GroupChatRoom(chat);
        }
        chatRoom.setTabTitle(title);
        chatRoom.setTabIcon(SparkRes.getImageIcon(SparkRes.FASTPATH_IMAGE_16x16));
        chatRoom.getChatWindowPanel().add(transcriptAlert, new GridBagConstraints(0, 9, 1, 1, 1.0, 1.0, GridBagConstraints.NORTHWEST, GridBagConstraints.BOTH, new Insets(1, 0, 1, 0), 0, 0));
        // set invisible
        chatRoom.getSplitPane().getRightComponent().setVisible(false);
        chatRoom.getBottomPanel().setVisible(false);
        chatRoom.getScrollPaneForTranscriptWindow().setVisible(false);
        SparkManager.getChatManager().getChatContainer().addChatRoom(chatRoom);
        FastpathPlugin.getLitWorkspace().addFastpathChatRoom(chatRoom, RoomState.invitationRequest);
        rejectButton.addActionListener(actionEvent -> {
            // Remove the pending room and tell the inviter we declined.
            ChatManager chatManager1 = SparkManager.getChatManager();
            chatManager1.removeChat(chatRoom);
            try
            {
                MultiUserChatManager.getInstanceFor( SparkManager.getConnection() ).decline( room, inviter, "No thank you" );
            }
            catch ( SmackException.NotConnectedException | InterruptedException e )
            {
                // Fixed typos in the log message ("deline invatation").
                Log.warning( "Unable to decline invitation from " + inviter + " to join room " + room, e );
            }
        });
    }

    /**
     * Removes oneself as an owner of the room.
     *
     * @param muc the <code>MultiUserChat</code> of the chat room.
     */
    private void removeOwner(MultiUserChat muc) {
        if (muc.isJoined()) {
            // Try and remove myself as an owner if I am one.
            Collection<Affiliate> owners;
            try {
                owners = muc.getOwners();
            }
            catch (XMPPException | SmackException | InterruptedException e1) {
                return;
            }
            if (owners == null) {
                return;
            }
            // Keep every owner except ourselves.
            Iterator<Affiliate> iter = owners.iterator();
            List<Jid> list = new ArrayList<>();
            while (iter.hasNext()) {
                Affiliate affiliate = iter.next();
                Jid jid = affiliate.getJid();
                if (!jid.equals(SparkManager.getSessionManager().getUserBareAddress())) {
                    list.add(jid);
                }
            }
            if (list.size() > 0) {
                try {
                    // Push the reduced owner list back via the room configuration form.
                    FillableForm form = muc.getConfigurationForm().getFillableForm();
                    List<String> jidStrings = new ArrayList<>(list.size());
                    JidUtil.toStrings(list, jidStrings);
                    form.setAnswer("muc#roomconfig_roomowners", jidStrings);
                    // new DataFormDialog(groupChat, form);
                    muc.sendConfigurationForm(form);
                }
                catch (XMPPException | SmackException | InterruptedException e) {
                    Log.error(e);
                }
            }
        }
    }
}
| apache-2.0 |
bazaarvoice/ostrich | perf-test-suite/src/main/java/com/bazaarvoice/ostrich/perftest/utils/ChaosRunner.java | 2441 | package com.bazaarvoice.ostrich.perftest.utils;
import com.bazaarvoice.ostrich.ServiceEndPoint;
import com.bazaarvoice.ostrich.metrics.Metrics;
import com.bazaarvoice.ostrich.perftest.core.Service;
import com.bazaarvoice.ostrich.pool.ServiceCache;
import com.codahale.metrics.Meter;
import com.codahale.metrics.MetricRegistry;
import com.google.common.collect.ImmutableList;
import java.util.List;
/**
 * Spawns worker threads that periodically evict and re-register random service
 * end points in the service cache, exercising the cache under churn. Each
 * eviction is counted on the "Chaos" meter and each re-registration on the
 * "Stable" meter.
 */
public class ChaosRunner {
    private final ServiceCache<Service<String, String>> _serviceCache;
    private final int _chaosWorkers;
    private final Meter _chaosMeter, _stableMeter;
    private final int _chaosInterval;

    public ChaosRunner(ServiceCache<Service<String, String>> serviceCache, Arguments arguments, MetricRegistry metricRegistry) {
        _serviceCache = serviceCache;
        _chaosWorkers = arguments.getChaosWorkers();
        Metrics.InstanceMetrics metrics = Metrics.forInstance(metricRegistry, this, "ChaosRunner");
        _chaosMeter = metrics.meter("Chaos");
        _stableMeter = metrics.meter("Stable");
        _chaosInterval = arguments.getChaosInterval();
    }

    /**
     * Creates (but does not start) the chaos worker threads. Each worker loops
     * until interrupted: it waits a random slice of the chaos interval, evicts
     * a random end point, waits out the remainder of the interval, and then
     * re-registers the end point.
     *
     * @return an immutable list of unstarted worker threads
     */
    public List<Thread> generateChaosWorkers() {
        ImmutableList.Builder<Thread> chaosWorkersBuilder = ImmutableList.builder();
        for (int i = 0; i < _chaosWorkers; i++) {
            Runnable runnable = new Runnable() {
                @Override
                public void run() {
                    // Use isInterrupted() rather than interrupted(): the latter
                    // clears the interrupt status, which should be left intact
                    // for any code that checks it after this loop exits.
                    while (!Thread.currentThread().isInterrupted()) {
                        try {
                            int sleepTime = Utilities.getRandomInt(_chaosInterval);
                            Utilities.sleepForSeconds(sleepTime);
                            String hashName = HashFunction.getRandomHashName();
                            ServiceEndPoint endPoint = Utilities.buildServiceEndPoint(hashName);
                            _serviceCache.evict(endPoint);
                            _chaosMeter.mark();
                            Utilities.sleepForSeconds(_chaosInterval - sleepTime);
                            _serviceCache.register(endPoint);
                            _stableMeter.mark();
                        }
                        catch (Exception e) {
                            // Report and keep the worker alive: a single failed
                            // chaos cycle should not stop the churn. (The former
                            // name "ignored" was misleading — the exception is
                            // printed, not ignored.)
                            e.printStackTrace();
                        }
                    }
                }
            };
            chaosWorkersBuilder.add(new Thread(runnable));
        }
        return chaosWorkersBuilder.build();
    }
}
| apache-2.0 |
USEF-Foundation/ri.usef.energy | usef-build/usef-workflow/usef-dso/src/test/java/energy/usef/dso/workflow/settlement/initiate/FinalizeUnfinishedInitiateSettlementEventTriggerTest.java | 3049 | /*
* Copyright 2015-2016 USEF Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package energy.usef.dso.workflow.settlement.initiate;
import energy.usef.core.service.helper.SchedulerHelperService;
import energy.usef.core.service.helper.WorkItemExecution;
import energy.usef.dso.config.ConfigDso;
import energy.usef.dso.config.ConfigDsoParam;
import javax.enterprise.event.Event;
import org.joda.time.Minutes;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Matchers;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.modules.junit4.PowerMockRunner;
import org.powermock.reflect.Whitebox;
/**
 * Unit tests for {@link FinalizeUnfinishedInitiateSettlementEventTrigger}:
 * verifies that the trigger registers a scheduled call with the configured
 * interval and that executing the scheduled work fires the expected event.
 */
@RunWith(PowerMockRunner.class)
public class FinalizeUnfinishedInitiateSettlementEventTriggerTest {

    @Mock
    private Event<FinalizeUnfinishedInitiateSettlementEvent> eventManager;

    @Mock
    private SchedulerHelperService schedulerHelperService;

    @Mock
    private ConfigDso configDso;

    private FinalizeUnfinishedInitiateSettlementEventTrigger trigger;

    @Before
    public void init() throws Exception {
        trigger = new FinalizeUnfinishedInitiateSettlementEventTrigger();
        // Inject the mocked collaborators into the trigger under test.
        Whitebox.setInternalState(trigger, schedulerHelperService);
        Whitebox.setInternalState(trigger, eventManager);
        Whitebox.setInternalState(trigger, configDso);
        // Configure a 60-minute expiration-check interval.
        PowerMockito.when(configDso.getIntegerProperty(ConfigDsoParam.DSO_METER_DATA_QUERY_EXPIRATION_CHECK_INTERVAL_IN_MINUTES))
                .thenReturn(60);
    }

    @Test
    public void testScheduleTrigger() throws Exception {
        trigger.scheduleTrigger();
        // The scheduled call must use the configured interval, in milliseconds.
        ArgumentCaptor<WorkItemExecution> workItemCaptor = ArgumentCaptor.forClass(WorkItemExecution.class);
        Long expectedIntervalMillis = Minutes.minutes(60).toStandardDuration().getMillis();
        Mockito.verify(schedulerHelperService, Mockito.times(1))
                .registerScheduledCall(Matchers.anyString(), workItemCaptor.capture(), Matchers.eq(0L), Matchers.eq(expectedIntervalMillis));
        WorkItemExecution scheduledWork = workItemCaptor.getValue();
        Assert.assertNotNull("Scheduled task is empty", scheduledWork);
        // Running the captured work item must fire exactly one settlement event.
        scheduledWork.execute();
        Mockito.verify(eventManager, Mockito.times(1)).fire(Matchers.any(FinalizeUnfinishedInitiateSettlementEvent.class));
    }
}
| apache-2.0 |
apache/geronimo-yoko | yoko-core/src/main/java/org/apache/yoko/orb/IMR/ServerFactoryOperations.java | 2358 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.yoko.orb.IMR;
//
// IDL:orb.yoko.apache.org/IMR/ServerFactory:1.0
//
/***/
/**
 * Operations of the IMR ServerFactory: lookup, creation and listing of
 * {@code Server} records held by the implementation repository.
 */
public interface ServerFactoryOperations
{
    //
    // IDL:orb.yoko.apache.org/IMR/ServerFactory/get_server:1.0
    //
    /**
     *
     * Retrieve a server record.
     *
     * @param server The server name
     *
     * @return The server record.
     *
     * @throws NoSuchServer if no server is registered under the given name
     *
     **/
    Server
    get_server(String server)
        throws NoSuchServer;

    //
    // IDL:orb.yoko.apache.org/IMR/ServerFactory/create_server_record:1.0
    //
    /**
     *
     * Create server record.
     *
     * @param server The server name
     *
     * @return The created server record.
     *
     * @throws ServerAlreadyRegistered if a server with this name already exists
     *
     **/
    Server
    create_server_record(String server)
        throws ServerAlreadyRegistered;

    //
    // IDL:orb.yoko.apache.org/IMR/ServerFactory/list_servers:1.0
    //
    /**
     *
     * List all the server records.
     *
     * @return A sequence of server records.
     *
     **/
    Server[]
    list_servers();

    //
    // IDL:orb.yoko.apache.org/IMR/ServerFactory/list_servers_by_host:1.0
    //
    /**
     *
     * List the server records for a particular OAD/Host.
     *
     * @param host The host name
     *
     * @return A sequence of server records.
     *
     **/
    Server[]
    list_servers_by_host(String host);

    //
    // IDL:orb.yoko.apache.org/IMR/ServerFactory/list_orphaned_servers:1.0
    //
    /**
     *
     * List the orphaned server records.
     *
     * @return A sequence of server records.
     *
     **/
    Server[]
    list_orphaned_servers();
}
| apache-2.0 |
Ztiany/CodeRepository | JavaEE/MVC-Sample-Register/src/main/java/com/ztiany/register/web/bean/UserRegisterFormBean.java | 3062 | package com.ztiany.register.web.bean;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.HashMap;
import java.util.Map;
/**
 * Form-backing bean for the user registration page.
 * <p>
 * Property names match the form input field names. All fields are kept as
 * {@code String} so raw user input can be validated; validation failures are
 * remembered as per-field error messages (see {@link #getErrors()}).
 */
public class UserRegisterFormBean {

    private String username;
    private String password;
    private String repassword;
    private String email;
    private String birthday;

    public String getUsername() {
        return username;
    }

    public void setUsername(String username) {
        this.username = username;
    }

    public String getPassword() {
        return password;
    }

    public void setPassword(String password) {
        this.password = password;
    }

    public String getRepassword() {
        return repassword;
    }

    public void setRepassword(String repassword) {
        this.repassword = repassword;
    }

    public String getEmail() {
        return email;
    }

    public void setEmail(String email) {
        this.email = email;
    }

    public String getBirthday() {
        return birthday;
    }

    public void setBirthday(String birthday) {
        this.birthday = birthday;
    }

    // Validation error messages: key = form field name, value = message shown to the user.
    private Map<String, String> errors = new HashMap<String, String>();

    public Map<String, String> getErrors() {
        return errors;
    }

    /**
     * Validates the user's input and records an error message for every
     * field that fails its check.
     *
     * @return {@code true} when no validation errors were recorded
     */
    public boolean validate() {
        // Username: required, 3-8 ASCII letters.
        if (username == null) {
            errors.put("username", "必须输入用户名");
        } else if (!username.matches("[a-zA-Z]{3,8}")) {
            errors.put("username", "用户名必须是3~8位字母组成");
        }

        // Password: required, 3-8 digits.
        if (password == null) {
            errors.put("password", "必须输入密码");
        } else if (!password.matches("\\d{3,8}")) {
            errors.put("password", "密码必须是3~8位数字组成");
        }

        // Password confirmation must match the password.
        if (password != null) {
            if (!password.equals(repassword)) {
                errors.put("repassword", "两次密码必须一致");
            }
        }

        // Email: required and must look like a valid address with a known TLD.
        if (email == null || email.trim().equals("")) {
            errors.put("email", "请输入邮箱");
        } else if (!email.matches("\\b^['_a-z0-9-\\+]+(\\.['_a-z0-9-\\+]+)*@[a-z0-9-]+(\\.[a-z0-9-]+)*\\.([a-z]{2}|aero|arpa|asia|biz|com|coop|edu|gov|info|int|jobs|mil|mobi|museum|name|nato|net|org|pro|tel|travel|xxx)$\\b")) {
            errors.put("email", "请输入正确的邮箱");
        }

        // Birthday: required and must be a real calendar date in yyyy-MM-dd format.
        if (birthday == null) {
            errors.put("birthday", "必须输入生日");
        } else {
            DateFormat df = new SimpleDateFormat("yyyy-MM-dd");
            // BUG FIX: SimpleDateFormat is lenient by default and silently rolls
            // impossible dates (e.g. 2020-02-30 parses as 2020-03-01), so invalid
            // birthdays were previously accepted. Strict parsing rejects them.
            df.setLenient(false);
            try {
                df.parse(birthday);
            } catch (ParseException e) {
                errors.put("birthday", "生日必须符合格式");
            }
        }
        return errors.isEmpty();
    }
}
| apache-2.0 |
apache/bookkeeper | bookkeeper-stats-providers/prometheus-metrics-provider/src/test/java/org/apache/bookkeeper/stats/prometheus/TestPrometheusFormatter.java | 9093 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.bookkeeper.stats.prometheus;
import static com.google.common.base.Preconditions.checkArgument;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import com.google.common.base.MoreObjects;
import com.google.common.base.Splitter;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;
import java.io.StringWriter;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.bookkeeper.stats.Counter;
import org.apache.bookkeeper.stats.OpStatsLogger;
import org.apache.bookkeeper.stats.StatsLogger;
import org.junit.Test;
/**
 * Test for {@link PrometheusMetricsProvider}.
 */
public class TestPrometheusFormatter {

    @Test(timeout = 30000)
    public void testStatsOutput() throws Exception {
        PrometheusMetricsProvider provider = new PrometheusMetricsProvider();
        StatsLogger statsLogger = provider.getStatsLogger("test");

        // Plain counter incremented twice -> expected exported value 2.0.
        Counter counter = statsLogger.getCounter("my_counter");
        counter.inc();
        counter.inc();

        // Op-stats logger without labels: two successful events, no failures.
        OpStatsLogger opStats = statsLogger.getOpStatsLogger("op");
        opStats.registerSuccessfulEvent(10, TimeUnit.MILLISECONDS);
        opStats.registerSuccessfulEvent(5, TimeUnit.MILLISECONDS);

        // Op-stats logger scoped with an extra label; also records one failure.
        OpStatsLogger opStats1 = statsLogger.scopeLabel("test_label", "test_value")
                .getOpStatsLogger("op_label");
        opStats1.registerSuccessfulEvent(10, TimeUnit.MILLISECONDS);
        opStats1.registerSuccessfulEvent(5, TimeUnit.MILLISECONDS);
        opStats1.registerFailedEvent(1, TimeUnit.MILLISECONDS);

        // NOTE(review): presumably rotates recorded latencies into the snapshot
        // that writeAllMetrics() exports — confirm against provider implementation.
        provider.rotateLatencyCollection();

        StringWriter writer = new StringWriter();
        provider.writeAllMetrics(writer);
        // Append synthetic JVM-metric lines to also exercise the parser against
        // these formats (scientific notation value, quoted pool names with spaces).
        writer.write("jvm_memory_direct_bytes_max{} 4.77626368E8\n");
        writer.write("jvm_memory_pool_bytes_used{pool=\"Code Cache\"} 3347712.0\n");
        writer.write("jvm_memory_pool_bytes_used{pool=\"CodeHeap 'non-nmethods'\"} 1207168.0\n");
        System.out.println(writer);
        Multimap<String, Metric> metrics = parseMetrics(writer.toString());
        System.out.println(metrics);

        // Counter: single sample, no tags, value 2.
        List<Metric> cm = (List<Metric>) metrics.get("test_my_counter");
        assertEquals(1, cm.size());
        assertEquals(0, cm.get(0).tags.size());
        assertEquals(2.0, cm.get(0).value, 0.0);

        // test_op_sum: one sample per success=false/true; 10ms + 5ms = 15 for successes.
        cm = (List<Metric>) metrics.get("test_op_sum");
        assertEquals(2, cm.size());
        Metric m = cm.get(0);
        assertEquals(1, cm.get(0).tags.size());
        assertEquals(0.0, m.value, 0.0);
        assertEquals(1, m.tags.size());
        assertEquals("false", m.tags.get("success"));

        m = cm.get(1);
        assertEquals(1, cm.get(0).tags.size());
        assertEquals(15.0, m.value, 0.0);
        assertEquals(1, m.tags.size());
        assertEquals("true", m.tags.get("success"));

        // test_op_count: 0 failed events, 2 successful events.
        cm = (List<Metric>) metrics.get("test_op_count");
        assertEquals(2, cm.size());
        m = cm.get(0);
        assertEquals(1, cm.get(0).tags.size());
        assertEquals(0.0, m.value, 0.0);
        assertEquals(1, m.tags.size());
        assertEquals("false", m.tags.get("success"));

        m = cm.get(1);
        assertEquals(1, cm.get(0).tags.size());
        assertEquals(2.0, m.value, 0.0);
        assertEquals(1, m.tags.size());
        assertEquals("true", m.tags.get("success"));

        // Latency quantiles: max (quantile=1.0) of the successes must be 10ms.
        cm = (List<Metric>) metrics.get("test_op");
        assertEquals(14, cm.size());
        boolean found = false;
        for (Metric mt : cm) {
            if ("true".equals(mt.tags.get("success")) && "1.0".equals(mt.tags.get("quantile"))) {
                assertEquals(10.0, mt.value, 0.0);
                found = true;
            }
        }
        assertTrue(found);

        // test_op_label_sum: labelled variant carries the extra test_label tag.
        cm = (List<Metric>) metrics.get("test_op_label_sum");
        assertEquals(2, cm.size());
        m = cm.get(0);
        assertEquals(2, m.tags.size());
        assertEquals(1.0, m.value, 0.0);
        assertEquals("false", m.tags.get("success"));
        assertEquals("test_value", m.tags.get("test_label"));

        m = cm.get(1);
        assertEquals(15.0, m.value, 0.0);
        assertEquals(2, m.tags.size());
        assertEquals("true", m.tags.get("success"));
        assertEquals("test_value", m.tags.get("test_label"));

        // test_op_label_count: 1 failed, 2 successful events.
        cm = (List<Metric>) metrics.get("test_op_label_count");
        assertEquals(2, cm.size());
        m = cm.get(0);
        assertEquals(1, m.value, 0.0);
        assertEquals(2, m.tags.size());
        assertEquals("false", m.tags.get("success"));
        assertEquals("test_value", m.tags.get("test_label"));

        m = cm.get(1);
        assertEquals(2.0, m.value, 0.0);
        assertEquals(2, m.tags.size());
        assertEquals("true", m.tags.get("success"));
        assertEquals("test_value", m.tags.get("test_label"));

        // Labelled latency quantiles: max of successes again 10ms.
        cm = (List<Metric>) metrics.get("test_op_label");
        assertEquals(14, cm.size());
        found = false;
        for (Metric mt : cm) {
            if ("true".equals(mt.tags.get("success"))
                    && "test_value".equals(mt.tags.get("test_label"))
                    && "1.0".equals(mt.tags.get("quantile"))) {
                assertEquals(10.0, mt.value, 0.0);
                found = true;
            }
        }
        assertTrue(found);
    }

    /**
     * Hacky parsing of Prometheus text format. Should be good enough for unit tests
     */
    private static Multimap<String, Metric> parseMetrics(String metrics) {
        Multimap<String, Metric> parsed = ArrayListMultimap.create();

        // Example of lines are
        // jvm_threads_current{cluster="standalone",} 203.0
        // or
        // pulsar_subscriptions_count{cluster="standalone", namespace="sample/standalone/ns1",
        // topic="persistent://sample/standalone/ns1/test-2"} 0.0 1517945780897
        // Groups of `pattern`: 1 = metric name, 3 = tag list (may be null), 4 = value,
        // 6 = optional timestamp. `formatPattern` is a stricter variant used only to
        // validate the line shape.
        Pattern pattern = Pattern.compile("^(\\w+)(\\{([^\\}]*)\\})?\\s(-?[\\d\\w\\.]+)(\\s(\\d+))?$");
        Pattern formatPattern =
                Pattern.compile("^(\\w+)(\\{((\\w+=[-\\s\\\'\\\"\\.\\w]+(,\\s?\\w+=[\\\"\\.\\w]+)*))?\\})?"
                        + "\\s(-?[\\d\\w\\.]+)(\\s(\\d+))?$");
        // Matches one `key="value"` pair inside the tag list.
        Pattern tagsPattern = Pattern.compile("(\\w+)=\"([^\"]+)\"(,\\s?)?");

        Splitter.on("\n").split(metrics).forEach(line -> {
            // Skip blank lines and `# HELP` / `# TYPE` comment lines.
            if (line.isEmpty() || line.startsWith("#")) {
                return;
            }

            System.err.println("LINE: '" + line + "'");
            Matcher matcher = pattern.matcher(line);
            Matcher formatMatcher = formatPattern.matcher(line);
            System.err.println("Matches: " + matcher.matches());
            System.err.println(matcher);
            assertTrue(matcher.matches());
            assertTrue("failed to validate line: " + line, formatMatcher.matches());
            assertEquals(6, matcher.groupCount());
            System.err.println("groups: " + matcher.groupCount());
            for (int i = 0; i < matcher.groupCount(); i++) {
                System.err.println("   GROUP " + i + " -- " + matcher.group(i));
            }

            // Matchers are stateful: matches() above already ran, but re-check via
            // checkArgument before extracting groups.
            checkArgument(matcher.matches());
            checkArgument(formatMatcher.matches());
            String name = matcher.group(1);

            Metric m = new Metric();
            m.value = Double.parseDouble(matcher.group(4));
            String tags = matcher.group(3);
            if (tags != null) {
                Matcher tagsMatcher = tagsPattern.matcher(tags);
                while (tagsMatcher.find()) {
                    String tag = tagsMatcher.group(1);
                    String value = tagsMatcher.group(2);
                    m.tags.put(tag, value);
                }
            }

            parsed.put(name, m);
        });

        return parsed;
    }

    /**
     * Parsed representation of one Prometheus sample line: its tag map and value.
     */
    static class Metric {
        // TreeMap keeps tags sorted by name for deterministic toString output.
        Map<String, String> tags = new TreeMap<>();
        double value;

        @Override
        public String toString() {
            return MoreObjects.toStringHelper(this).add("tags", tags).add("value", value).toString();
        }
    }
}
| apache-2.0 |
vedi/android-profile | SoomlaAndroidProfile/src/com/soomla/profile/events/gameservices/BaseGameServicesEvent.java | 1212 | /*
* Copyright (C) 2012-2015 Soomla Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.soomla.profile.events.gameservices;
import com.soomla.profile.domain.IProvider;
/**
 * Common superclass for all game-services events. Every such event carries
 * the social provider it originated from plus an optional caller-supplied
 * payload used to correlate the event with the action that triggered it.
 */
public abstract class BaseGameServicesEvent {

    /** The provider on which the game services event has occurred. */
    public final IProvider.Provider Provider;

    /** An identification String sent from the caller of the action. */
    public final String Payload;

    /**
     * Creates the event with its originating provider and caller payload.
     *
     * @param provider the provider the event occurred on
     * @param payload  identification string supplied by the caller (may be null)
     */
    protected BaseGameServicesEvent(IProvider.Provider provider, String payload) {
        this.Payload = payload;
        this.Provider = provider;
    }
}
| apache-2.0 |
apache/flex-blazeds | core/src/main/java/flex/management/runtime/messaging/endpoints/HTTPEndpointControl.java | 1741 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package flex.management.runtime.messaging.endpoints;
import flex.management.BaseControl;
import flex.messaging.endpoints.HTTPEndpoint;
/**
 * The <code>HTTPEndpointControl</code> class is the MBean implemenation
 * for monitoring and managing a <code>HTTPEndpoint</code> at runtime.
 */
public class HTTPEndpointControl extends PollingEndpointControl implements
        HTTPEndpointControlMBean
{
    // Type string reported via getType(); identifies this MBean kind in the
    // management hierarchy.
    private static final String TYPE = "HTTPEndpoint";

    /**
     * Constructs a <code>HTTPEndpointControl</code>, assigning managed message
     * endpoint and parent MBean.
     *
     * @param endpoint The <code>HTTPEndpoint</code> managed by this MBean.
     * @param parent The parent MBean in the management hierarchy.
     */
    public HTTPEndpointControl(HTTPEndpoint endpoint, BaseControl parent)
    {
        super(endpoint, parent);
    }

    /** {@inheritDoc} */
    public String getType()
    {
        return TYPE;
    }
}
| apache-2.0 |
erichwang/presto | presto-benchto-benchmarks/src/test/java/io/prestosql/sql/planner/TestTpcdsCostBasedPlan.java | 3395 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.sql.planner;
import com.google.common.collect.ImmutableMap;
import io.prestosql.Session;
import io.prestosql.plugin.tpcds.TpcdsConnectorFactory;
import io.prestosql.sql.analyzer.FeaturesConfig.JoinDistributionType;
import io.prestosql.sql.analyzer.FeaturesConfig.JoinReorderingStrategy;
import io.prestosql.testing.LocalQueryRunner;
import java.util.stream.IntStream;
import java.util.stream.Stream;
import static io.prestosql.SystemSessionProperties.JOIN_DISTRIBUTION_TYPE;
import static io.prestosql.SystemSessionProperties.JOIN_REORDERING_STRATEGY;
import static io.prestosql.testing.TestingSession.testSessionBuilder;
import static java.lang.String.format;
/**
 * This class tests cost-based optimization rules related to joins. It contains unmodified TPCDS queries.
 * This class is using TPCDS connector configured in way to mock Hive connector with unpartitioned TPCDS tables.
 */
public class TestTpcdsCostBasedPlan
        extends AbstractCostBasedPlanTest
{
    /*
     * CAUTION: The expected plans here are not necessarily optimal yet. Their role is to prevent
     * inadvertent regressions. A conscious improvement to the planner may require changing some
     * of the expected plans, but any such change should be verified on an actual cluster with
     * large amount of data.
     */

    @Override
    protected LocalQueryRunner createLocalQueryRunner()
    {
        // Session mimicking a Hive-backed catalog at scale factor 3000, with
        // cost-based join reordering and distribution selection enabled.
        Session session = testSessionBuilder()
                .setCatalog("local")
                .setSchema("sf3000.0")
                .setSystemProperty("task_concurrency", "1") // these tests don't handle exchanges from local parallel
                .setSystemProperty(JOIN_REORDERING_STRATEGY, JoinReorderingStrategy.AUTOMATIC.name())
                .setSystemProperty(JOIN_DISTRIBUTION_TYPE, JoinDistributionType.AUTOMATIC.name())
                .build();

        LocalQueryRunner runner = LocalQueryRunner.builder(session)
                .withNodeCountForStats(8)
                .build();

        runner.createCatalog(
                "local",
                new TpcdsConnectorFactory(1),
                ImmutableMap.of());
        return runner;
    }

    @Override
    protected Stream<String> getQueryResourcePaths()
    {
        // Resource paths for TPCDS queries q01 .. q99.
        return IntStream.range(1, 100)
                .mapToObj(queryNumber -> format("q%02d", queryNumber))
                .map(queryId -> format("/sql/presto/tpcds/%s.sql", queryId));
    }

    @SuppressWarnings("unused")
    public static final class UpdateTestFiles
    {
        // Intellij doesn't handle well situation when test class has main(), hence inner class.
        private UpdateTestFiles() {}

        public static void main(String[] args)
        {
            new TestTpcdsCostBasedPlan().generate();
        }
    }
}
| apache-2.0 |
deeplearning4j/deeplearning4j | deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark/src/main/java/org/deeplearning4j/spark/impl/graph/scoring/ArrayPairToPair.java | 1331 | /*******************************************************************************
* Copyright (c) 2015-2018 Skymind, Inc.
*
* This program and the accompanying materials are made available under the
* terms of the Apache License, Version 2.0 which is available at
* https://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* SPDX-License-Identifier: Apache-2.0
******************************************************************************/
package org.deeplearning4j.spark.impl.graph.scoring;
import org.apache.spark.api.java.function.PairFunction;
import org.nd4j.linalg.api.ndarray.INDArray;
import scala.Tuple2;
/**
 * Simple conversion function for SparkComputationGraph
 *
 * @author Alex Black
 */
public class ArrayPairToPair<K> implements PairFunction<Tuple2<K, INDArray[]>, K, INDArray> {
    @Override
    public Tuple2<K, INDArray> call(Tuple2<K, INDArray[]> v1) throws Exception {
        // Keep only the first output array; a null array-of-arrays maps to null.
        INDArray[] arrays = v1._2();
        INDArray first;
        if (arrays == null) {
            first = null;
        } else {
            first = arrays[0];
        }
        return new Tuple2<>(v1._1(), first);
    }
}
| apache-2.0 |
crate/crate | server/src/main/java/org/elasticsearch/action/support/ActiveShardCount.java | 8639 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.support;
import com.carrotsearch.hppc.cursors.IntObjectCursor;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.routing.IndexRoutingTable;
import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import java.io.IOException;
import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_WAIT_FOR_ACTIVE_SHARDS;
/**
 * A class whose instances represent a value for counting the number
 * of active shard copies for a given shard in an index.
 */
public final class ActiveShardCount implements Writeable {

    // Sentinel wire values for the two special counts; real counts are >= 0.
    // These exact values are serialized via writeInt, so they must not change.
    private static final int ACTIVE_SHARD_COUNT_DEFAULT = -2;
    private static final int ALL_ACTIVE_SHARDS = -1;

    public static final ActiveShardCount DEFAULT = new ActiveShardCount(ACTIVE_SHARD_COUNT_DEFAULT);
    public static final ActiveShardCount ALL = new ActiveShardCount(ALL_ACTIVE_SHARDS);
    public static final ActiveShardCount NONE = new ActiveShardCount(0);
    public static final ActiveShardCount ONE = new ActiveShardCount(1);

    // Required number of active copies, or one of the sentinel values above.
    private final int value;

    private ActiveShardCount(final int value) {
        this.value = value;
    }

    /**
     * Get an ActiveShardCount instance for the given value. The value is first validated to ensure
     * it is a valid shard count and throws an IllegalArgumentException if validation fails. Valid
     * values are any non-negative number. Directly use {@link ActiveShardCount#DEFAULT} for the
     * default value (which is one shard copy) or {@link ActiveShardCount#ALL} to specify all the shards.
     */
    public static ActiveShardCount from(final int value) {
        if (value < 0) {
            throw new IllegalArgumentException("shard count cannot be a negative value");
        }
        return get(value);
    }

    /**
     * Validates that the instance is valid for the given number of replicas in an index.
     * A required count may be at most replicas + 1 (the primary plus all replicas).
     */
    public boolean validate(final int numberOfReplicas) {
        assert numberOfReplicas >= 0;
        return value <= numberOfReplicas + 1;
    }

    // Maps raw values onto the shared singleton instances where one exists.
    private static ActiveShardCount get(final int value) {
        switch (value) {
            case ACTIVE_SHARD_COUNT_DEFAULT:
                return DEFAULT;
            case ALL_ACTIVE_SHARDS:
                return ALL;
            case 1:
                return ONE;
            case 0:
                return NONE;
            default:
                assert value > 1;
                return new ActiveShardCount(value);
        }
    }

    @Override
    public void writeTo(final StreamOutput out) throws IOException {
        // Sentinels (-1, -2) are written as-is and decoded by readFrom/get.
        out.writeInt(value);
    }

    public static ActiveShardCount readFrom(final StreamInput in) throws IOException {
        return get(in.readInt());
    }

    /**
     * Parses the active shard count from the given string. Valid values are "all" for
     * all shard copies, null for the default value (which defaults to one shard copy),
     * or a numeric value greater than or equal to 0. Any other input will throw an
     * IllegalArgumentException.
     */
    public static ActiveShardCount parseString(final String str) {
        if (str == null) {
            return ActiveShardCount.DEFAULT;
        } else if (str.equalsIgnoreCase("all")) {
            return ActiveShardCount.ALL;
        } else {
            int val;
            try {
                val = Integer.parseInt(str);
            } catch (NumberFormatException e) {
                throw new IllegalArgumentException("cannot parse ActiveShardCount[" + str + "]", e);
            }
            return ActiveShardCount.from(val);
        }
    }

    /**
     * Returns true iff the given number of active shards is enough to meet
     * the required shard count represented by this instance. This method
     * should only be invoked with {@link ActiveShardCount} objects created
     * from {@link #from(int)}, or {@link #NONE} or {@link #ONE}.
     */
    public boolean enoughShardsActive(final int activeShardCount) {
        // Negative value means DEFAULT or ALL, which cannot be resolved to a
        // concrete number without index metadata — reject such calls.
        if (this.value < 0) {
            throw new IllegalStateException("not enough information to resolve to shard count");
        }
        if (activeShardCount < 0) {
            throw new IllegalArgumentException("activeShardCount cannot be negative");
        }
        return this.value <= activeShardCount;
    }

    /**
     * Returns true iff the given cluster state's routing table contains enough active
     * shards for the given indices to meet the required shard count represented by this instance.
     */
    public boolean enoughShardsActive(final ClusterState clusterState, final String... indices) {
        if (this == ActiveShardCount.NONE) {
            // not waiting for any active shards
            return true;
        }
        for (final String indexName : indices) {
            final IndexMetadata indexMetadata = clusterState.metadata().index(indexName);
            if (indexMetadata == null) {
                // its possible the index was deleted while waiting for active shard copies,
                // in this case, we'll just consider it that we have enough active shard copies
                // and we can stop waiting
                continue;
            }
            final IndexRoutingTable indexRoutingTable = clusterState.routingTable().index(indexName);
            assert indexRoutingTable != null;
            if (indexRoutingTable.allPrimaryShardsActive() == false) {
                // all primary shards aren't active yet
                return false;
            }
            ActiveShardCount waitForActiveShards = this;
            if (waitForActiveShards == ActiveShardCount.DEFAULT) {
                // DEFAULT resolves to the per-index wait_for_active_shards setting.
                waitForActiveShards = SETTING_WAIT_FOR_ACTIVE_SHARDS.get(indexMetadata.getSettings());
            }
            for (final IntObjectCursor<IndexShardRoutingTable> shardRouting : indexRoutingTable.getShards()) {
                if (waitForActiveShards.enoughShardsActive(shardRouting.value) == false) {
                    // not enough active shard copies yet
                    return false;
                }
            }
        }
        return true;
    }

    /**
     * Returns true iff the active shard count in the shard routing table is enough
     * to meet the required shard count represented by this instance.
     */
    public boolean enoughShardsActive(final IndexShardRoutingTable shardRoutingTable) {
        final int activeShardCount = shardRoutingTable.activeShards().size();
        if (this == ActiveShardCount.ALL) {
            // adding 1 for the primary in addition to the total number of replicas,
            // which gives us the total number of shard copies
            return activeShardCount == shardRoutingTable.replicaShards().size() + 1;
        } else if (this == ActiveShardCount.DEFAULT) {
            // Default semantics at this level: at least the primary must be active.
            return activeShardCount >= 1;
        } else {
            return activeShardCount >= value;
        }
    }

    @Override
    public int hashCode() {
        return Integer.hashCode(value);
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        @SuppressWarnings("unchecked") ActiveShardCount that = (ActiveShardCount) o;
        return value == that.value;
    }

    @Override
    public String toString() {
        // "ALL" and "DEFAULT" round-trip through parseString (case-insensitive for "all").
        switch (value) {
            case ALL_ACTIVE_SHARDS:
                return "ALL";
            case ACTIVE_SHARD_COUNT_DEFAULT:
                return "DEFAULT";
            default:
                return Integer.toString(value);
        }
    }
}
| apache-2.0 |
aguynamedben/cassandra-counters | src/java/org/apache/cassandra/streaming/StreamHeader.java | 4055 | package org.apache.cassandra.streaming;
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.cassandra.io.ICompactSerializer;
/**
 * Header describing one file transfer within a streaming session: the session id,
 * the file currently being streamed, and (optionally, on the first transfer of a
 * batch) the list of files the receiver should expect.
 */
public class StreamHeader
{
    private static ICompactSerializer<StreamHeader> serializer;

    static
    {
        serializer = new StreamHeaderSerializer();
    }

    public static ICompactSerializer<StreamHeader> serializer()
    {
        return serializer;
    }

    // The file currently being streamed.
    private PendingFile file;

    // Identifier of the stream session this header belongs to.
    private long sessionId;

    // indicates an initiated transfer as opposed to a request
    protected final boolean initiatedTransfer;

    // this list will only be non-null when the first of a batch of files are being sent. it avoids having to have
    // a separate message indicating which files to expect.
    private final List<PendingFile> pending;

    public StreamHeader(long sessionId, PendingFile file, boolean initiatedTransfer)
    {
        this.sessionId  = sessionId;
        this.file = file;
        this.initiatedTransfer = initiatedTransfer;
        pending = null;
    }

    public StreamHeader(long sessionId, PendingFile file, List<PendingFile> pending, boolean initiatedTransfer)
    {
        this.sessionId  = sessionId;
        this.file = file;
        this.initiatedTransfer = initiatedTransfer;
        this.pending = pending;
    }

    public List<PendingFile> getPendingFiles()
    {
        return pending;
    }

    public PendingFile getStreamFile()
    {
        return file;
    }

    public long getSessionId()
    {
        return sessionId;
    }

    /**
     * Wire format (order matters and must match between serialize/deserialize):
     * sessionId (long), current file, initiatedTransfer (boolean),
     * pending-file count (int, 0 when there is no pending list), then that many files.
     */
    private static class StreamHeaderSerializer implements ICompactSerializer<StreamHeader>
    {
        public void serialize(StreamHeader sh, DataOutputStream dos) throws IOException
        {
            dos.writeLong(sh.getSessionId());
            PendingFile.serializer().serialize(sh.getStreamFile(), dos);
            dos.writeBoolean(sh.initiatedTransfer);

            if (sh.pending != null)
            {
                dos.writeInt(sh.getPendingFiles().size());
                for(PendingFile file : sh.getPendingFiles())
                {
                    PendingFile.serializer().serialize(file, dos);
                }
            }
            else
                // No batch list: write a zero count so deserialize reads nothing further.
                dos.writeInt(0);
        }

        public StreamHeader deserialize(DataInputStream dis) throws IOException
        {
            long sessionId = dis.readLong();
            PendingFile file = PendingFile.serializer().deserialize(dis);
            boolean initiatedTransfer = dis.readBoolean();
            int size = dis.readInt();

            StreamHeader header;
            if (size > 0)
            {
                List<PendingFile> pendingFiles = new ArrayList<PendingFile>(size);
                for (int i=0; i<size; i++)
                {
                    pendingFiles.add(PendingFile.serializer().deserialize(dis));
                }
                header = new StreamHeader(sessionId, file, pendingFiles, initiatedTransfer);
            }
            else
            {
                // Note: an empty (non-null) pending list serializes as count 0 and
                // deserializes back as a null list.
                header = new StreamHeader(sessionId, file, initiatedTransfer);
            }
            return header;
        }
    }
}
| apache-2.0 |
belaban/JGroups | tests/junit-functional/org/jgroups/tests/NakackUnitTest.java | 4284 | package org.jgroups.tests;
import org.jgroups.*;
import org.jgroups.protocols.*;
import org.jgroups.protocols.pbcast.GMS;
import org.jgroups.protocols.pbcast.NAKACK2;
import org.jgroups.protocols.pbcast.STABLE;
import org.jgroups.stack.Protocol;
import org.jgroups.util.Util;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.Test;
import java.util.ArrayList;
import java.util.List;
/**
* Tests NAKACK2 functionality, especially flag {@link Message.TransientFlag#DONT_LOOPBACK}.
* @author Bela Ban
* @since 3.5
*/
@Test(groups=Global.FUNCTIONAL)
public class NakackUnitTest {
protected JChannel a, b;
protected MyReceiver ra, rb;
@AfterMethod protected void tearDown() throws Exception {Util.close(b, a);}
// @Test(invocationCount=10)
public void testMessagesToAllWithDontLoopback() throws Exception {
a=create("A", false); b=create("B", false);
createReceivers();
_testMessagesToAllWithDontLoopback();
}
// @Test(invocationCount=10)
public void testMessagesToOtherBatching() throws Exception {
a=create("A", true); b=create("B", true);
createReceivers();
_testMessagesToAllWithDontLoopback();
}
protected void _testMessagesToAllWithDontLoopback() throws Exception {
connect();
Message[] msgs={
msg(),
msg().setFlag(Message.Flag.OOB),
msg().setFlag(Message.Flag.OOB),
msg().setFlag(Message.Flag.OOB).setFlag(Message.TransientFlag.DONT_LOOPBACK),
msg().setFlag(Message.TransientFlag.DONT_LOOPBACK),
msg().setFlag(Message.Flag.OOB),
msg().setFlag(Message.Flag.OOB),
msg().setFlag(Message.Flag.OOB).setFlag(Message.TransientFlag.DONT_LOOPBACK),
msg().setFlag(Message.Flag.OOB).setFlag(Message.TransientFlag.DONT_LOOPBACK),
msg().setFlag(Message.Flag.OOB).setFlag(Message.TransientFlag.DONT_LOOPBACK),
msg().setFlag(Message.Flag.OOB).setFlag(Message.TransientFlag.DONT_LOOPBACK),
msg().setFlag(Message.Flag.OOB)
};
send(a, msgs);
checkReception(ra, 1,2,3,6,7,12);
checkReception(rb, 1,2,3,4,5,6,7,8,9,10,11,12);
}
protected static void send(JChannel ch, Message... msgs) throws Exception {
int cnt=1;
for(Message msg: msgs) {
msg.setObject(cnt++);
ch.send(msg);
}
}
protected static void checkReception(MyReceiver r, int... num) {
List<Integer> received=r.list();
for(int i=0; i < 10; i++) {
if(received.size() == num.length)
break;
Util.sleep(500);
}
List<Integer> expected=new ArrayList<>(num.length);
for(int n: num) expected.add(n);
System.out.println("received=" + received + ", expected=" + expected);
assert received.size() == expected.size() : "list=" + received + ", expected=" + expected;
assert received.containsAll(expected) : "list=" + received + ", expected=" + expected;
}
/** Installs a fresh MyReceiver on each channel and keeps references for assertions. */
protected void createReceivers() {
    a.setReceiver(ra=new MyReceiver());
    b.setReceiver(rb=new MyReceiver());
}
/** Creates an empty message with a null destination, i.e. a multicast to all members. */
protected static Message msg() {return new BytesMessage(null);}
/**
 * Builds an unconnected channel backed by an in-memory (SHARED_LOOPBACK) transport.
 * The MAKE_BATCH protocol combines individual multicast messages into message
 * batches when {@code use_batching} is true, so the same test logic can exercise
 * both the single-message and the batched delivery path.
 * Note: the order of the protocols below defines the stack (transport at the bottom).
 */
protected static JChannel create(String name, boolean use_batching) throws Exception {
    Protocol[] protocols={
        new SHARED_LOOPBACK(),
        new SHARED_LOOPBACK_PING(),
        new MAKE_BATCH().sleepTime(100).multicasts(use_batching),
        new NAKACK2(),
        new UNICAST3(),
        new STABLE(),
        new GMS(),
        new FRAG2().setFragSize(8000),
    };
    return new JChannel(protocols).name(name);
}
/** Connects both channels to the same cluster and waits (max 10s) until they share one view. */
protected void connect() throws Exception {
    a.connect("UnicastUnitTest");
    b.connect("UnicastUnitTest");
    Util.waitUntilAllChannelsHaveSameView(10000, 1000, a, b);
}
/**
 * Records the Integer payload of every received message.
 * Writes are guarded by synchronizing on the list; {@link #list()} returns the
 * live list (not a copy), so callers that read it while messages may still be
 * arriving should synchronize on the returned list as well.
 */
protected static class MyReceiver implements Receiver {
    protected final List<Integer> list=new ArrayList<>();
    /** Returns the live (not a copy) list of received message numbers. */
    public List<Integer> list() {return list;}
    public void receive(Message msg) {
        Integer num=msg.getObject();
        synchronized(list) {
            list.add(num);
        }
    }
}
} | apache-2.0 |
cuba-platform/cuba | modules/web-widgets/src/com/haulmont/cuba/web/widgets/UploadComponent.java | 856 | /*
* Copyright (c) 2008-2017 Haulmont.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.haulmont.cuba.web.widgets;
import com.vaadin.ui.Component;
/**
 * Common contract for file-upload UI widgets. Exposes the file-type filter
 * (the HTML {@code accept} attribute) and a description/tooltip setter on top
 * of the base Vaadin component and focus behavior.
 */
public interface UploadComponent extends Component, Component.Focusable {
    /** Returns the current file-type filter ({@code accept} attribute value). */
    String getAccept();
    /** Sets the file-type filter ({@code accept} attribute value), e.g. {@code ".png,.jpg"}. */
    void setAccept(String accept);
    /** Sets the component description (shown to the user, e.g. as a tooltip). */
    void setDescription(String description);
}
garricko/error-prone | core/src/main/java/com/google/errorprone/matchers/method/ConstructorMatcherImpl.java | 2475 | /*
* Copyright 2015 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.errorprone.matchers.method;
import com.google.common.base.Optional;
import com.google.errorprone.VisitorState;
import com.google.errorprone.matchers.method.MethodMatchers.ConstructorClassMatcher;
import com.google.errorprone.matchers.method.MethodMatchers.ConstructorMatcher;
import com.google.errorprone.predicates.TypePredicates;
import com.google.errorprone.suppliers.Supplier;
import com.sun.source.tree.ExpressionTree;
import com.sun.tools.javac.code.Symbol.MethodSymbol;
import com.sun.tools.javac.code.Type;
import com.sun.tools.javac.tree.JCTree.JCNewClass;
/** Matches constructors, allows refinement on class type. */
/**
 * Matches constructor invocations ({@code new Foo(...)}); allows refinement on the
 * constructed class via {@link #forClass}.
 */
public class ConstructorMatcherImpl extends AbstractSimpleMatcher<MatchState>
    implements ConstructorMatcher {

  @Override
  protected Optional<MatchState> matchResult(ExpressionTree tree, VisitorState state) {
    if (!(tree instanceof JCNewClass)) {
      return Optional.absent();
    }
    JCNewClass newClass = (JCNewClass) tree;
    // Check the constructor symbol *before* dereferencing it: on erroneous sources
    // it may be null (instanceof handles that) or resolved to a non-method symbol.
    if (!(newClass.constructor instanceof MethodSymbol)) {
      return Optional.absent();
    }
    MethodSymbol sym = (MethodSymbol) newClass.constructor;
    // TODO(user): Don't catch NullPointerException. Need to do this right now
    // for internal use, but remember to remove later.
    try {
      Type clazz = sym.getEnclosingElement().type;
      return Optional.of(MatchState.create(clazz, sym));
    } catch (NullPointerException e) {
      return Optional.absent();
    }
  }

  /** Restricts this matcher to constructors of exactly the named class. */
  @Override
  public ConstructorClassMatcher forClass(String className) {
    return new ConstructorClassMatcherImpl(this, TypePredicates.isExactType(className));
  }

  /** Restricts this matcher to constructors of exactly the supplied type. */
  @Override
  public ConstructorClassMatcher forClass(Supplier<Type> classType) {
    return new ConstructorClassMatcherImpl(this, TypePredicates.isExactType(classType));
  }
}
| apache-2.0 |
lz84/bachelor | bachelor-auth/bachelor-scaffold/src/main/java/cn/org/bachelor/common/auth/vo/Objects.java | 4594 | package cn.org.bachelor.common.auth.vo;
import java.util.Date;
/**
 * Value object describing an authorization "object" (protected resource).
 * Field Javadoc below records the backing column name from the original source.
 */
public class Objects {
    /** Primary key (column ID). */
    private String id;
    /** Object name (column NAME). */
    private String name;
    /** Object code (column CODE). */
    private String code;
    /** Object location/URI (column URI). */
    private String uri;
    /** Object operation (column OPERATE). */
    private String operate;
    /** Object type (column TYPE). */
    private String type;
    /** Owning domain code (column DOMAIN_CODE). */
    private String domainCode;
    /** Owning domain name (column DOMAIN_NAME). */
    private String domainName;
    /** Sort order (column ORDER). */
    private Integer seqOrder;
    /** Default authorization behavior (column DEF_AUTH_OP). */
    private String defAuthOp;
    /** Last update time (column UPDATE_TIME). */
    private Date updateTime;
    /** Last updater (column UPDATE_USER). */
    private String updateUser;
    /** @return the primary key (column ID) */
    public String getId() {
        return id;
    }
    /** @param id the primary key */
    public void setId(String id) {
        this.id = id;
    }
    /** @return the object name (column NAME) */
    public String getName() {
        return name;
    }
    /** @param name the object name */
    public void setName(String name) {
        this.name = name;
    }
    /** @return the object code (column CODE) */
    public String getCode() {
        return code;
    }
    /** @param code the object code */
    public void setCode(String code) {
        this.code = code;
    }
    /** @return the object location/URI (column URI) */
    public String getUri() {
        return uri;
    }
    /** @param uri the object location/URI */
    public void setUri(String uri) {
        this.uri = uri;
    }
    /** @return the object operation (column OPERATE) */
    public String getOperate() {
        return operate;
    }
    /** @param operate the object operation */
    public void setOperate(String operate) {
        this.operate = operate;
    }
    /** @return the object type (column TYPE) */
    public String getType() {
        return type;
    }
    /** @param type the object type */
    public void setType(String type) {
        this.type = type;
    }
    /** @return the owning domain code (column DOMAIN_CODE) */
    public String getDomainCode() {
        return domainCode;
    }
    /** @param domainCode the owning domain code */
    public void setDomainCode(String domainCode) {
        this.domainCode = domainCode;
    }
    /** @return the sort order (column ORDER) */
    public Integer getSeqOrder() {
        return seqOrder;
    }
    /** @param seqOrder the sort order */
    public void setSeqOrder(Integer seqOrder) {
        this.seqOrder = seqOrder;
    }
    /** @return the default authorization behavior (column DEF_AUTH_OP) */
    public String getDefAuthOp() {
        return defAuthOp;
    }
    /** @param defAuthOp the default authorization behavior */
    public void setDefAuthOp(String defAuthOp) {
        this.defAuthOp = defAuthOp;
    }
    /** @return the last update time (column UPDATE_TIME) */
    public Date getUpdateTime() {
        return updateTime;
    }
    /** @param updateTime the last update time */
    public void setUpdateTime(Date updateTime) {
        this.updateTime = updateTime;
    }
    /** @return the last updater (column UPDATE_USER) */
    public String getUpdateUser() {
        return updateUser;
    }
    /** @param updateUser the last updater */
    public void setUpdateUser(String updateUser) {
        this.updateUser = updateUser;
    }
    /** @return the owning domain name (column DOMAIN_NAME) */
    public String getDomainName() {
        return domainName;
    }
    /** @param domainName the owning domain name */
    public void setDomainName(String domainName) {
        this.domainName = domainName;
    }
}
kevinearls/camel | components/camel-spring/src/test/java/org/apache/camel/spring/management/SpringJmxDumpRoutesAsXmlTest.java | 2357 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.spring.management;
import javax.management.MBeanServer;
import javax.management.ObjectName;
import org.apache.camel.spring.SpringTestSupport;
import org.junit.Test;
import org.springframework.context.support.AbstractXmlApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;
/**
 * Verifies that invoking the {@code dumpRoutesAsXml} JMX operation on the Camel
 * context MBean returns XML containing the routes and endpoints declared in
 * SpringJmxDumpRouteAsXmlTest.xml.
 */
public class SpringJmxDumpRoutesAsXmlTest extends SpringTestSupport {
    @Override
    protected boolean useJmx() {
        // JMX must be enabled for the context MBean to be registered.
        return true;
    }
    @Override
    protected AbstractXmlApplicationContext createApplicationContext() {
        return new ClassPathXmlApplicationContext("org/apache/camel/spring/management/SpringJmxDumpRouteAsXmlTest.xml");
    }
    /** Returns the MBean server used by the Camel management agent. */
    protected MBeanServer getMBeanServer() {
        return context.getManagementStrategy().getManagementAgent().getMBeanServer();
    }
    @Test
    public void testJmxDumpRoutesAsXml() throws Exception {
        MBeanServer mbeanServer = getMBeanServer();
        // ObjectName of the CamelContext MBean (context name "camel-1").
        ObjectName on = ObjectName.getInstance("org.apache.camel:context=camel-1,type=context,name=\"camel-1\"");
        // dumpRoutesAsXml takes no arguments, hence null params/signature.
        String xml = (String) mbeanServer.invoke(on, "dumpRoutesAsXml", null, null);
        assertNotNull(xml);
        log.info(xml);
        // Route ids and endpoint URIs from the Spring XML must appear in the dump.
        assertTrue(xml.contains("route"));
        assertTrue(xml.contains("myRoute"));
        assertTrue(xml.contains("myOtherRoute"));
        assertTrue(xml.contains("direct:start"));
        assertTrue(xml.contains("mock:result"));
        assertTrue(xml.contains("seda:bar"));
        assertTrue(xml.contains("mock:bar"));
    }
}
| apache-2.0 |
calvinlotsberg/OurScape | src/com/rs/game/player/content/dungeoneering/DungeonPartyPlayer.java | 477 | package com.rs.game.player.content.dungeoneering;
import com.rs.game.player.Player;
/**
 * Per-player state for a dungeoneering party: tracks the player's death count
 * and mirrors it to the client via config file 7554.
 */
public class DungeonPartyPlayer {

    private final Player player;
    // Death counter shown to the client; capped at 15 (presumably the interface's
    // display limit — TODO confirm against the client config).
    private int deaths;

    public DungeonPartyPlayer(Player player) {
        this.player = player;
    }

    public Player getPlayer() {
        return player;
    }

    /** Pushes the current death count to the client (config file 7554). */
    public void refreshDeaths() {
        player.getPackets().sendConfigByFile(7554, deaths);
    }

    /**
     * Increments the death counter and refreshes the client display.
     * Uses {@code >=} (rather than {@code ==}) so the counter can never pass the
     * cap even if it were ever set above 15 by other code.
     */
    public void increaseDeaths() {
        if (deaths >= 15)
            return;
        deaths++;
        refreshDeaths();
    }
}
| apache-2.0 |
googleworkspace/java-samples | vault/vault-hold-migration-api/src/main/java/com/google/vault/chatmigration/QuickStart.java | 5255 | package com.google.vault.chatmigration;
import com.google.api.services.admin.directory.Directory;
import com.google.api.services.vault.v1.Vault;
import com.google.vault.chatmigration.MigrationHelper.MigrationOptions;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVPrinter;
/**
 * CLI entry point for the Vault hold migration: either exports existing Gmail
 * holds to a CSV report, or reads that report and creates matching Hangouts
 * Chat holds (writing failures to an error CSV).
 */
public class QuickStart {
  private static final Logger LOGGER = Logger.getLogger(QuickStart.class.getName());

  /**
   * Parses only the help option (stopping at the first unknown token) so that
   * {@code --help} works even when required options are missing.
   */
  private static boolean hasHelpOption(String... args) throws ParseException {
    Options helpOptions = new Options().addOption(MigrationHelper.helpOption);
    CommandLine cl = new DefaultParser().parse(helpOptions, args, true);
    return cl.hasOption(MigrationOptions.HELP.getOption());
  }

  public static void main(String... args) {
    Options options = MigrationHelper.buildOptions();
    try {
      if (hasHelpOption(args)) {
        MigrationHelper.printHelp(options);
      } else {
        CommandLine line = new DefaultParser().parse(options, args);
        String reportFile = line.getOptionValue("f");
        String errorFile = line.getOptionValue("e");
        if (line.hasOption(MigrationOptions.GENERATE_REPORT.getOption())) {
          generateReport(reportFile);
        } else if (line.hasOption(MigrationOptions.DUPLICATE_HOLDS.getOption())) {
          duplicateHolds(
              reportFile, errorFile, line.hasOption(MigrationOptions.INCLUDE_ROOMS.getOption()));
        }
      }
    } catch (ParseException parseException) {
      // Bad command line: show the problem and the usage text.
      System.out.println(parseException.getMessage());
      LOGGER.log(Level.WARNING, parseException.toString());
      MigrationHelper.printHelp(options);
    } catch (Exception exception) {
      LOGGER.log(Level.SEVERE, exception.toString());
    }
  }

  /**
   * Exports all Gmail holds to {@code holdsReportFile} as CSV.
   * The printer is opened in try-with-resources so the underlying FileWriter is
   * flushed and closed even on failure (previously it was never closed, risking
   * a truncated or empty report file).
   */
  private static void generateReport(String holdsReportFile) throws Exception {
    Directory directory = MigrationHelper.getDirectoryService();
    DirectoryService directoryService = new DirectoryService(directory);
    Vault vaultService = MigrationHelper.getVaultService();
    System.out.println(
        "--------------------------------------------------------------------------------------");
    System.out.println(
        " Starting Hold report generation. Holds will be exported to: " + holdsReportFile);
    System.out.println();
    try (CSVPrinter printer = getCSVPrinter(holdsReportFile)) {
      HoldsReport holdReport = new HoldsReport(vaultService, directoryService, printer);
      int totalHoldsCount = holdReport.buildReport();
      System.out.println();
      System.out.println(
          " Hold report generated successfully. " + totalHoldsCount + " Gmail holds exported.");
      System.out.println(
          "--------------------------------------------------------------------------------------");
    }
  }

  /**
   * Reads the holds CSV and creates corresponding Hangouts Chat holds; failures
   * are written to {@code errorFile}. Both the parser and the error printer are
   * closed via try-with-resources (previously neither was closed, so the error
   * CSV could be left unflushed/empty).
   */
  private static void duplicateHolds(String holdsReportFile, String errorFile, boolean includeRooms)
      throws Exception {
    Vault vaultService = MigrationHelper.getVaultService();
    System.out.println(
        "-----------------------------------------------------------------------------------------------");
    System.out.println(
        " Hangouts Chat hold creation started. Hold(s) will be picked from: " + holdsReportFile);
    System.out.println();
    try (CSVPrinter errorReport = getCSVPrinter(errorFile);
        CSVParser parser =
            CSVParser.parse(
                new File(holdsReportFile),
                Charset.defaultCharset(),
                CSVFormat.DEFAULT
                    .withHeader(
                        HoldsReport.MATTER_ID,
                        HoldsReport.MATTER_NAME,
                        HoldsReport.HOLD_ID,
                        HoldsReport.HOLD_NAME,
                        HoldsReport.ORG_UNIT_ID,
                        HoldsReport.ORG_UNIT_PATH,
                        HoldsReport.ORG_UNIT_NAME,
                        HoldsReport.ACCOUNT_IDS,
                        HoldsReport.ACCOUNTS,
                        HoldsReport.CORPUS,
                        HoldsReport.TERMS,
                        HoldsReport.START_TIME,
                        HoldsReport.END_TIME)
                    .withSkipHeaderRecord())) {
      DuplicateHold duplicateHold =
          new DuplicateHold(parser, includeRooms, vaultService, errorReport);
      int holdCount = duplicateHold.duplicateHolds();
      System.out.println();
      System.out.println(" Finished creating Hangouts Chat hold(s).");
      System.out.println(
          " Copied "
              + holdCount
              + " holds for Hangouts Chat. Please check "
              + errorFile
              + " for any errors.");
      System.out.println(
          "--------------------------------------------------------------------------------------");
    }
  }

  /** Opens a CSV printer over {@code fileName}; caller is responsible for closing it. */
  private static CSVPrinter getCSVPrinter(String fileName) throws IOException {
    return new CSVPrinter(new FileWriter(fileName), CSVFormat.DEFAULT);
  }
}
| apache-2.0 |
ezsimple/java | spring/apt/src/main/java/net/ion/oadr2/specification/EiReport.java | 1059 | package net.ion.oadr2.specification;
import javax.jws.WebService;
import net.ion.open.oadr2.model.v20b.OadrCanceledReport;
import net.ion.open.oadr2.model.v20b.OadrResponse;
/**
 * Web-service contract for the OpenADR 2.0b EiReport conversation.
 * Each operation consumes an oadr request payload and returns the corresponding
 * oadr response payload; paired operations mirror the register/create/update/
 * cancel report exchanges.
 */
@WebService(name = "EiReport", targetNamespace = "http://services/")
public interface EiReport extends Profile20B{
    /** Handles oadrRegisterReport; returns the registered-report acknowledgement. */
    public net.ion.open.oadr2.model.v20b.OadrRegisteredReport oadrRegisterReport(net.ion.open.oadr2.model.v20b.OadrRegisterReport payload) throws Throwable;
    /** Handles oadrCreatedReport; returns a generic oadrResponse. */
    public OadrResponse oadrCreatedReport(net.ion.open.oadr2.model.v20b.OadrCreatedReport payload) throws Throwable;
    /** Handles oadrCreateReport; returns the created-report acknowledgement. */
    public net.ion.open.oadr2.model.v20b.OadrCreatedReport oadrCreateReport(net.ion.open.oadr2.model.v20b.OadrCreateReport payload) throws Throwable;
    /** Handles oadrUpdateReport; returns the updated-report acknowledgement. */
    public net.ion.open.oadr2.model.v20b.OadrUpdatedReport oadrUpdateReport(net.ion.open.oadr2.model.v20b.OadrUpdateReport payload) throws Throwable;
    /** Handles oadrCancelReport; returns the canceled-report acknowledgement. */
    public OadrCanceledReport oadrCancelReport(net.ion.open.oadr2.model.v20b.OadrCancelReport payload) throws Throwable;
    /** Handles oadrCanceledReport; returns a generic oadrResponse. */
    OadrResponse oadrCanceledReport(OadrCanceledReport payload) throws Throwable;
}
| apache-2.0 |
mdogan/hazelcast | hazelcast/src/main/java/com/hazelcast/ringbuffer/impl/RingbufferContainerCollector.java | 2633 | /*
* Copyright (c) 2008-2020, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.ringbuffer.impl;
import com.hazelcast.config.MergePolicyConfig;
import com.hazelcast.spi.impl.NodeEngine;
import com.hazelcast.internal.services.ObjectNamespace;
import com.hazelcast.spi.impl.merge.AbstractContainerCollector;
import java.util.Collection;
import java.util.Iterator;
import java.util.Map;
/**
 * Collects ringbuffer containers (per partition, keyed by namespace) for
 * split-brain merging, exposing their merge-policy configuration and destroy
 * semantics to the generic {@link AbstractContainerCollector}.
 */
class RingbufferContainerCollector extends AbstractContainerCollector<RingbufferContainer> {

    /** Live container map of the ringbuffer service: partitionId -> (namespace -> container). */
    private final Map<Integer, Map<ObjectNamespace, RingbufferContainer>> containers;

    RingbufferContainerCollector(NodeEngine nodeEngine, Map<Integer, Map<ObjectNamespace, RingbufferContainer>> containers) {
        super(nodeEngine);
        this.containers = containers;
    }

    @Override
    protected Iterator<RingbufferContainer> containerIterator(int partitionId) {
        Map<ObjectNamespace, RingbufferContainer> containerMap = containers.get(partitionId);
        if (containerMap == null) {
            // No ringbuffers on this partition.
            return new EmptyIterator();
        }
        return containerMap.values().iterator();
    }

    @Override
    protected MergePolicyConfig getMergePolicyConfig(RingbufferContainer container) {
        return container.getConfig().getMergePolicyConfig();
    }

    @Override
    protected void destroy(RingbufferContainer container) {
        container.clear();
    }

    @Override
    protected void destroyBackup(RingbufferContainer container) {
        container.clear();
    }

    @Override
    protected boolean isMergeable(RingbufferContainer container) {
        // Only merge containers that actually belong to the ringbuffer service.
        String containerServiceName = container.getNamespace().getServiceName();
        return RingbufferService.SERVICE_NAME.equals(containerServiceName);
    }

    @Override
    protected int getMergingValueCount() {
        int size = 0;
        // Renamed from "containers" to avoid shadowing the field of the same name:
        // this iterates the *collected* containers, not the live service map.
        for (Collection<RingbufferContainer> collectedContainers : getCollectedContainers().values()) {
            for (RingbufferContainer container : collectedContainers) {
                size += container.size();
            }
        }
        return size;
    }
}
| apache-2.0 |
sshcherbakov/incubator-geode | gemfire-core/src/main/java/com/gemstone/gemfire/internal/cache/tier/sockets/command/GetEntry70.java | 2855 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.gemstone.gemfire.internal.cache.tier.sockets.command;
import com.gemstone.gemfire.cache.Region;
import com.gemstone.gemfire.cache.Region.Entry;
import com.gemstone.gemfire.internal.cache.EntrySnapshot;
import com.gemstone.gemfire.internal.cache.LocalRegion;
import com.gemstone.gemfire.internal.cache.NonLocalRegionEntry;
import com.gemstone.gemfire.internal.cache.tier.Command;
import com.gemstone.gemfire.internal.cache.tier.sockets.ServerConnection;
import com.gemstone.gemfire.internal.cache.versions.VersionTag;
/**
* getEntry(key) operation performed on server.
* Extends Request, and overrides getValueAndIsObject() in Request
* so as to not invoke loader.
* @author sbawaska
* @since 6.6
*/
/**
 * getEntry(key) operation performed on server.
 * Extends Request, and overrides getValueAndIsObject() in Request
 * so as to not invoke loader.
 * @author sbawaska
 * @since 6.6
 */
public class GetEntry70 extends Get70 {

  // Stateless command, shared via singleton.
  private final static GetEntry70 singleton = new GetEntry70();

  public static Command getCommand() {
    return singleton;
  }

  protected GetEntry70() {
  }

  @Override
  protected Get70.Entry getEntry(Region region, Object key,
      Object callbackArg, ServerConnection servConn) {
    return getValueAndIsObject(region, key, callbackArg, servConn);
  }

  /**
   * Looks up the region entry for {@code key} without triggering a cache loader
   * and wraps it (plus its version tag) in a Get70.Entry. If the key is absent,
   * value and versionTag are left null; keyNotPresent is always reported false
   * and isObject always true, since the returned value is an EntrySnapshot.
   */
  @Override
  public Get70.Entry getValueAndIsObject(Region region, Object key,
      Object callbackArg, ServerConnection servConn) {
    LocalRegion lregion = (LocalRegion)region;
    Object data = null;
    Region.Entry entry = region.getEntry(key);
    if (logger.isDebugEnabled()) {
      logger.debug("GetEntryCommand: for key: {} returning entry: {}", key, entry);
    }
    VersionTag tag = null;
    if (entry != null) {
      // Snapshot the entry so it can be serialized back to the client without
      // holding a reference to the live region entry.
      EntrySnapshot snap = new EntrySnapshot();
      NonLocalRegionEntry re = new NonLocalRegionEntry(entry, lregion);
      snap.setRegionEntry(re);
      snap.setRegion(lregion);
      data = snap;
      tag = snap.getVersionTag();
    }
    Get70.Entry result = new Get70.Entry();
    result.value = data;
    result.isObject = true;
    result.keyNotPresent = false;
    result.versionTag = tag;
    return result;
  }
}
| apache-2.0 |
rishabhmonga/ehcache3 | 107/src/main/java/org/ehcache/jsr107/Eh107CacheStatisticsMXBean.java | 12951 | /*
* Copyright Terracotta, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ehcache.jsr107;
import org.ehcache.Cache;
import org.ehcache.core.InternalCache;
import org.ehcache.core.statistics.CacheOperationOutcomes;
import org.ehcache.core.statistics.StoreOperationOutcomes;
import org.ehcache.core.statistics.BulkOps;
import org.terracotta.context.ContextManager;
import org.terracotta.context.TreeNode;
import org.terracotta.context.query.Matcher;
import org.terracotta.context.query.Matchers;
import org.terracotta.context.query.Query;
import org.terracotta.statistics.OperationStatistic;
import org.terracotta.statistics.StatisticsManager;
import org.terracotta.statistics.derived.LatencySampling;
import org.terracotta.statistics.derived.MinMaxAverage;
import org.terracotta.statistics.jsr166e.LongAdder;
import org.terracotta.statistics.observer.ChainedOperationObserver;
import java.util.Collections;
import java.util.EnumSet;
import java.util.Map;
import java.util.Set;
import static java.util.EnumSet.allOf;
import static org.terracotta.context.query.Matchers.attributes;
import static org.terracotta.context.query.Matchers.context;
import static org.terracotta.context.query.Matchers.hasAttribute;
import static org.terracotta.context.query.QueryBuilder.queryBuilder;
/**
* @author Ludovic Orban
*/
class Eh107CacheStatisticsMXBean extends Eh107MXBean implements javax.cache.management.CacheStatisticsMXBean {
private final CompensatingCounters compensatingCounters = new CompensatingCounters();
private final OperationStatistic<CacheOperationOutcomes.GetOutcome> get;
private final OperationStatistic<CacheOperationOutcomes.PutOutcome> put;
private final OperationStatistic<CacheOperationOutcomes.RemoveOutcome> remove;
private final OperationStatistic<CacheOperationOutcomes.PutIfAbsentOutcome> putIfAbsent;
private final OperationStatistic<CacheOperationOutcomes.ReplaceOutcome> replace;
private final OperationStatistic<CacheOperationOutcomes.ConditionalRemoveOutcome> conditionalRemove;
private final OperationStatistic<StoreOperationOutcomes.EvictionOutcome> authorityEviction;
private final Map<BulkOps, LongAdder> bulkMethodEntries;
private final LatencyMonitor<CacheOperationOutcomes.GetOutcome> averageGetTime;
private final LatencyMonitor<CacheOperationOutcomes.PutOutcome> averagePutTime;
private final LatencyMonitor<CacheOperationOutcomes.RemoveOutcome> averageRemoveTime;
Eh107CacheStatisticsMXBean(String cacheName, Eh107CacheManager cacheManager, InternalCache<?, ?> cache) {
super(cacheName, cacheManager, "CacheStatistics");
this.bulkMethodEntries = cache.getBulkMethodEntries();
get = findCacheStatistic(cache, CacheOperationOutcomes.GetOutcome.class, "get");
put = findCacheStatistic(cache, CacheOperationOutcomes.PutOutcome.class, "put");
remove = findCacheStatistic(cache, CacheOperationOutcomes.RemoveOutcome.class, "remove");
putIfAbsent = findCacheStatistic(cache, CacheOperationOutcomes.PutIfAbsentOutcome.class, "putIfAbsent");
replace = findCacheStatistic(cache, CacheOperationOutcomes.ReplaceOutcome.class, "replace");
conditionalRemove = findCacheStatistic(cache, CacheOperationOutcomes.ConditionalRemoveOutcome.class, "conditionalRemove");
authorityEviction = findAuthoritativeTierStatistic(cache, StoreOperationOutcomes.EvictionOutcome.class, "eviction");
averageGetTime = new LatencyMonitor<CacheOperationOutcomes.GetOutcome>(allOf(CacheOperationOutcomes.GetOutcome.class));
get.addDerivedStatistic(averageGetTime);
averagePutTime = new LatencyMonitor<CacheOperationOutcomes.PutOutcome>(allOf(CacheOperationOutcomes.PutOutcome.class));
put.addDerivedStatistic(averagePutTime);
averageRemoveTime= new LatencyMonitor<CacheOperationOutcomes.RemoveOutcome>(allOf(CacheOperationOutcomes.RemoveOutcome.class));
remove.addDerivedStatistic(averageRemoveTime);
}
@Override
public void clear() {
compensatingCounters.snapshot();
averageGetTime.clear();
averagePutTime.clear();
averageRemoveTime.clear();
}
@Override
public long getCacheHits() {
return normalize(getHits() - compensatingCounters.cacheHits - compensatingCounters.bulkGetHits);
}
@Override
public float getCacheHitPercentage() {
long cacheHits = getCacheHits();
return normalize((float) cacheHits / (cacheHits + getCacheMisses())) * 100.0f;
}
@Override
public long getCacheMisses() {
return normalize(getMisses() - compensatingCounters.cacheMisses - compensatingCounters.bulkGetMiss);
}
@Override
public float getCacheMissPercentage() {
long cacheMisses = getCacheMisses();
return normalize((float) cacheMisses / (getCacheHits() + cacheMisses)) * 100.0f;
}
@Override
public long getCacheGets() {
return normalize(getHits() + getMisses()
- compensatingCounters.cacheGets
- compensatingCounters.bulkGetHits
- compensatingCounters.bulkGetMiss);
}
@Override
public long getCachePuts() {
return normalize(getBulkCount(BulkOps.PUT_ALL) - compensatingCounters.bulkPuts +
put.sum(EnumSet.of(CacheOperationOutcomes.PutOutcome.PUT)) +
put.sum(EnumSet.of(CacheOperationOutcomes.PutOutcome.UPDATED)) +
putIfAbsent.sum(EnumSet.of(CacheOperationOutcomes.PutIfAbsentOutcome.PUT)) +
replace.sum(EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.HIT)) -
compensatingCounters.cachePuts);
}
@Override
public long getCacheRemovals() {
return normalize(getBulkCount(BulkOps.REMOVE_ALL) - compensatingCounters.bulkRemovals +
remove.sum(EnumSet.of(CacheOperationOutcomes.RemoveOutcome.SUCCESS)) +
conditionalRemove.sum(EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.SUCCESS)) -
compensatingCounters.cacheRemovals);
}
@Override
public long getCacheEvictions() {
return normalize(authorityEviction.sum(EnumSet.of(StoreOperationOutcomes.EvictionOutcome.SUCCESS)) - compensatingCounters.cacheEvictions);
}
@Override
public float getAverageGetTime() {
return (float) averageGetTime.value();
}
@Override
public float getAveragePutTime() {
return (float) averagePutTime.value();
}
@Override
public float getAverageRemoveTime() {
return (float) averageRemoveTime.value();
}
private long getMisses() {
return getBulkCount(BulkOps.GET_ALL_MISS) +
get.sum(EnumSet.of(CacheOperationOutcomes.GetOutcome.MISS_NO_LOADER, CacheOperationOutcomes.GetOutcome.MISS_WITH_LOADER)) +
putIfAbsent.sum(EnumSet.of(CacheOperationOutcomes.PutIfAbsentOutcome.PUT)) +
replace.sum(EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.MISS_NOT_PRESENT)) +
conditionalRemove.sum(EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.FAILURE_KEY_MISSING));
}
private long getHits() {
return getBulkCount(BulkOps.GET_ALL_HITS) +
get.sum(EnumSet.of(CacheOperationOutcomes.GetOutcome.HIT_NO_LOADER, CacheOperationOutcomes.GetOutcome.HIT_WITH_LOADER)) +
putIfAbsent.sum(EnumSet.of(CacheOperationOutcomes.PutIfAbsentOutcome.PUT)) +
replace.sum(EnumSet.of(CacheOperationOutcomes.ReplaceOutcome.HIT, CacheOperationOutcomes.ReplaceOutcome.MISS_PRESENT)) +
conditionalRemove.sum(EnumSet.of(CacheOperationOutcomes.ConditionalRemoveOutcome.SUCCESS, CacheOperationOutcomes.ConditionalRemoveOutcome.FAILURE_KEY_PRESENT));
}
private long getBulkCount(BulkOps bulkOps) {
return bulkMethodEntries.get(bulkOps).longValue();
}
private static long normalize(long value) {
return Math.max(0, value);
}
private static float normalize(float value) {
if (Float.isNaN(value)) {
return 0.0f;
}
return Math.min(1.0f, Math.max(0.0f, value));
}
static <T extends Enum<T>> OperationStatistic<T> findCacheStatistic(Cache<?, ?> cache, Class<T> type, String statName) {
Query query = queryBuilder()
.children()
.filter(context(attributes(Matchers.<Map<String, Object>>allOf(hasAttribute("name", statName), hasAttribute("type", type)))))
.build();
Set<TreeNode> result = query.execute(Collections.singleton(ContextManager.nodeFor(cache)));
if (result.size() > 1) {
throw new RuntimeException("result must be unique");
}
if (result.isEmpty()) {
throw new RuntimeException("result must not be null");
}
return (OperationStatistic<T>) result.iterator().next().getContext().attributes().get("this");
}
<T extends Enum<T>> OperationStatistic<T> findAuthoritativeTierStatistic(Cache<?, ?> cache, Class<T> type, String statName) {
Query storeQuery = queryBuilder()
.children()
.children()
.filter(context(attributes(Matchers.<Map<String, Object>>allOf(
hasAttribute("tags", new Matcher<Set<String>>() {
@Override
protected boolean matchesSafely(Set<String> object) {
return object.containsAll(Collections.singleton("store"));
}
})))))
.build();
Set<TreeNode> storeResult = storeQuery.execute(Collections.singleton(ContextManager.nodeFor(cache)));
if (storeResult.size() > 1) {
throw new RuntimeException("store result must be unique");
}
if (storeResult.isEmpty()) {
throw new RuntimeException("store result must not be null");
}
Object authoritativeTier = storeResult.iterator().next().getContext().attributes().get("authoritativeTier");
Query statQuery = queryBuilder()
.children()
.filter(context(attributes(Matchers.<Map<String, Object>>allOf(hasAttribute("name", statName), hasAttribute("type", type)))))
.build();
Set<TreeNode> statResult = statQuery.execute(Collections.singleton(StatisticsManager.nodeFor(authoritativeTier)));
if (statResult.size() > 1) {
throw new RuntimeException("stat result must be unique");
}
if (statResult.isEmpty()) {
throw new RuntimeException("stat result must not be null");
}
return (OperationStatistic) statResult.iterator().next().getContext().attributes().get("this");
}
class CompensatingCounters {
volatile long cacheHits;
volatile long cacheMisses;
volatile long cacheGets;
volatile long bulkGetHits;
volatile long bulkGetMiss;
volatile long cachePuts;
volatile long bulkPuts;
volatile long cacheRemovals;
volatile long bulkRemovals;
volatile long cacheEvictions;
void snapshot() {
cacheHits += Eh107CacheStatisticsMXBean.this.getCacheHits();
cacheMisses += Eh107CacheStatisticsMXBean.this.getCacheMisses();
cacheGets += Eh107CacheStatisticsMXBean.this.getCacheGets();
bulkGetHits += Eh107CacheStatisticsMXBean.this.getBulkCount(BulkOps.GET_ALL_HITS);
bulkGetMiss += Eh107CacheStatisticsMXBean.this.getBulkCount(BulkOps.GET_ALL_MISS);
cachePuts += Eh107CacheStatisticsMXBean.this.getCachePuts();
bulkPuts += Eh107CacheStatisticsMXBean.this.getBulkCount(BulkOps.PUT_ALL);
cacheRemovals += Eh107CacheStatisticsMXBean.this.getCacheRemovals();
bulkRemovals += Eh107CacheStatisticsMXBean.this.getBulkCount(BulkOps.REMOVE_ALL);
cacheEvictions += Eh107CacheStatisticsMXBean.this.getCacheEvictions();
}
}
private static class LatencyMonitor<T extends Enum<T>> implements ChainedOperationObserver<T> {

    /** Sampler that observes every operation and feeds {@link #average}. */
    private final LatencySampling<T> sampling;

    /** Derived min/max/mean statistic; swapped out wholesale on {@link #clear()}. */
    private volatile MinMaxAverage average;

    public LatencyMonitor(Set<T> targets) {
        this.sampling = new LatencySampling<T>(targets, 1.0);
        this.average = new MinMaxAverage();
        sampling.addDerivedStatistic(average);
    }

    @Override
    public void begin(long time) {
        sampling.begin(time);
    }

    @Override
    public void end(long time, T result) {
        sampling.end(time, result);
    }

    @Override
    public void end(long time, T result, long... parameters) {
        sampling.end(time, result, parameters);
    }

    /**
     * Returns the mean latency in microseconds (samples are taken in
     * nanoseconds; JSR-107 reports microseconds), or 0 when nothing has been
     * recorded yet.
     */
    public double value() {
        Double mean = average.mean();
        return (mean == null) ? 0 : mean / 1000f;
    }

    /** Discards the accumulated statistic and starts a fresh one. */
    public synchronized void clear() {
        sampling.removeDerivedStatistic(average);
        average = new MinMaxAverage();
        sampling.addDerivedStatistic(average);
    }
}
}
| apache-2.0 |
joshthornhill/spring-boot | spring-boot/src/main/java/org/springframework/boot/system/EmbeddedServerPortFileWriter.java | 4589 | /*
* Copyright 2012-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.system;
import java.io.File;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.boot.context.embedded.EmbeddedServletContainerInitializedEvent;
import org.springframework.boot.context.embedded.EmbeddedWebApplicationContext;
import org.springframework.context.ApplicationListener;
import org.springframework.util.Assert;
import org.springframework.util.FileCopyUtils;
import org.springframework.util.StringUtils;
/**
* An {@link ApplicationListener} that saves embedded server port and management port into
* file. This application listener will be triggered whenever the servlet container
* starts, and the file name can be overridden at runtime with a System property or
* environment variable named "PORTFILE" or "portfile".
*
* @author David Liu
* @author Phillip Webb
* @author Andy Wilkinson
* @since 1.4.0
*/
public class EmbeddedServerPortFileWriter
		implements ApplicationListener<EmbeddedServletContainerInitializedEvent> {

	private static final String DEFAULT_FILE_NAME = "application.port";

	private static final String[] PROPERTY_VARIABLES = { "PORTFILE", "portfile" };

	private static final Log logger = LogFactory
			.getLog(EmbeddedServerPortFileWriter.class);

	private final File file;

	/**
	 * Create a new {@link EmbeddedServerPortFileWriter} instance using the filename
	 * 'application.port'.
	 */
	public EmbeddedServerPortFileWriter() {
		this(new File(DEFAULT_FILE_NAME));
	}

	/**
	 * Create a new {@link EmbeddedServerPortFileWriter} instance with a specified
	 * filename.
	 * @param filename the name of file containing port
	 */
	public EmbeddedServerPortFileWriter(String filename) {
		this(new File(filename));
	}

	/**
	 * Create a new {@link EmbeddedServerPortFileWriter} instance with a specified file.
	 * @param file the file containing port
	 */
	public EmbeddedServerPortFileWriter(File file) {
		Assert.notNull(file, "File must not be null");
		// The PORTFILE/portfile system property or environment variable, when
		// present, overrides the configured file location.
		String override = SystemProperties.get(PROPERTY_VARIABLES);
		if (override != null) {
			this.file = new File(override);
		}
		else {
			this.file = file;
		}
	}

	@Override
	public void onApplicationEvent(EmbeddedServletContainerInitializedEvent event) {
		File portFile = getPortFile(event.getApplicationContext());
		try {
			String port = String.valueOf(event.getEmbeddedServletContainer().getPort());
			createParentFolder(portFile);
			FileCopyUtils.copy(port.getBytes(), portFile);
			portFile.deleteOnExit();
		}
		catch (Exception ex) {
			// Include the cause so failures are diagnosable from the log
			// (previously only the file name was logged).
			logger.warn(String.format("Cannot create port file %s", this.file), ex);
		}
	}

	/**
	 * Return the actual port file that should be written for the given application
	 * context. The default implementation builds a file from the source file and the
	 * application context namespace.
	 * @param applicationContext the source application context
	 * @return the file that should be written
	 */
	protected File getPortFile(EmbeddedWebApplicationContext applicationContext) {
		String contextName = applicationContext.getNamespace();
		if (StringUtils.isEmpty(contextName)) {
			return this.file;
		}
		String name = this.file.getName();
		String extension = StringUtils.getFilenameExtension(name);
		// Strip the extension (when present) so the namespace can be inserted
		// before it. StringUtils.getFilenameExtension returns null when there
		// is no extension; the previous code assumed one always existed and
		// threw a NullPointerException for names such as "portfile".
		if (StringUtils.hasLength(extension)) {
			name = name.substring(0, name.length() - extension.length() - 1);
		}
		if (isUpperCase(name)) {
			name = name + "-" + contextName.toUpperCase();
		}
		else {
			name = name + "-" + contextName.toLowerCase();
		}
		if (StringUtils.hasLength(extension)) {
			name = name + "." + extension;
		}
		return new File(this.file.getParentFile(), name);
	}

	/**
	 * Return {@code true} when every letter in the given name is upper case;
	 * non-letter characters are ignored.
	 */
	private boolean isUpperCase(String name) {
		for (int i = 0; i < name.length(); i++) {
			char ch = name.charAt(i);
			if (Character.isLetter(ch) && !Character.isUpperCase(ch)) {
				return false;
			}
		}
		return true;
	}

	/**
	 * Create the parent directories of the given file, if any (no-op for files
	 * in the working directory).
	 */
	private void createParentFolder(File file) {
		File parent = file.getParentFile();
		if (parent != null) {
			parent.mkdirs();
		}
	}

}
| apache-2.0 |
dragonzhou/humor | src/ca/pfv/spmf/test/MainTestTRuleGrowth_withStrings.java | 1204 | package ca.pfv.spmf.test;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URL;
import ca.pfv.spmf.algorithms.sequential_rules.trulegrowth_with_strings.AlgoTRuleGrowth_withStrings;
/**
* * Example of how to use the TRULEGROWTH algorithm with strings in source code.
* @author Philippe Fournier-Viger (Copyright 2010)
*/
public class MainTestTRuleGrowth_withStrings {

	public static void main(String [] arg) throws IOException{
		String input = fileToPath("contextPrefixSpanStrings.txt"); // the input sequence database
		String output = ".//output.txt"; // the path for saving the rules found

		// Apply the TRuleGrowth algorithm with minsup = 0.7 (a fraction of the
		// sequences), minconf = 0.8 and a sliding window of 3.
		// (An earlier comment claimed "minsup = 3 sequences and minconf = 0.5",
		// which did not match the values actually used below.)
		double minsup = 0.7;
		double minconf = 0.8;
		int windowSize = 3;
		AlgoTRuleGrowth_withStrings algo = new AlgoTRuleGrowth_withStrings();
		algo.runAlgorithm(minsup, minconf, input, output, windowSize);

		// print statistics about the algorithm execution
		algo.printStats();
	}

	/**
	 * Resolves a classpath resource located next to this class into a
	 * filesystem path, URL-decoding it so paths containing spaces work.
	 * Note: throws a NullPointerException if the resource does not exist.
	 */
	public static String fileToPath(String filename) throws UnsupportedEncodingException{
		URL url = MainTestTRuleGrowth_withStrings.class.getResource(filename);
		return java.net.URLDecoder.decode(url.getPath(),"UTF-8");
	}
}
| apache-2.0 |
spinnaker/clouddriver | clouddriver-alicloud/src/test/java/com/netflix/spinnaker/clouddriver/alicloud/deploy/ops/UpsertAliCloudSecurityGroupAtomicOperationTest.java | 4360 | /*
* Copyright 2019 Alibaba Group.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.spinnaker.clouddriver.alicloud.deploy.ops;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.when;
import com.aliyuncs.ecs.model.v20140526.*;
import com.aliyuncs.ecs.model.v20140526.DescribeSecurityGroupAttributeResponse.Permission;
import com.aliyuncs.ecs.model.v20140526.DescribeSecurityGroupsResponse.SecurityGroup;
import com.aliyuncs.exceptions.ClientException;
import com.netflix.spinnaker.clouddriver.alicloud.deploy.description.UpsertAliCloudSecurityGroupDescription;
import java.util.ArrayList;
import java.util.List;
import org.junit.Before;
import org.junit.Test;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
public class UpsertAliCloudSecurityGroupAtomicOperationTest extends CommonAtomicOperation {

  /**
   * Stubs the Alibaba Cloud client so successive getAcsResponse calls replay
   * the lookup / create / describe-attributes / authorize sequence issued by
   * the operation under test.
   */
  @Before
  public void testBefore() throws ClientException {
    when(client.getAcsResponse(any()))
        .thenAnswer(new DescribeSecurityGroupsAnswer())
        .thenAnswer(new CreateSecurityGroupAnswer())
        .thenAnswer(new DescribeSecurityGroupAttributeAnswer())
        .thenAnswer(new AuthorizeSecurityGroupAnswer());
  }

  /** Runs the operation end-to-end against the stubbed client. */
  @Test
  public void testOperate() {
    UpsertAliCloudSecurityGroupAtomicOperation operation =
        new UpsertAliCloudSecurityGroupAtomicOperation(
            buildDescription(), clientFactory, objectMapper);
    operation.operate(priorOutputs);
  }

  /** Builds a description requesting one TCP ingress rule on a test group. */
  private UpsertAliCloudSecurityGroupDescription buildDescription() {
    UpsertAliCloudSecurityGroupDescription desc = new UpsertAliCloudSecurityGroupDescription();
    desc.setRegion(REGION);
    desc.setCredentials(credentials);
    desc.setSecurityGroupName("test-SecurityGroupName");

    AuthorizeSecurityGroupRequest ingressRule = new AuthorizeSecurityGroupRequest();
    ingressRule.setIpProtocol("tcp");
    ingressRule.setPortRange("1/200");
    ingressRule.setSourceCidrIp("10.0.0.0/8");

    List<AuthorizeSecurityGroupRequest> securityGroupIngress = new ArrayList<>();
    securityGroupIngress.add(ingressRule);
    desc.setSecurityGroupIngress(securityGroupIngress);
    return desc;
  }

  /** Answers with an empty listing: the security group does not exist yet. */
  private class DescribeSecurityGroupsAnswer implements Answer<DescribeSecurityGroupsResponse> {
    @Override
    public DescribeSecurityGroupsResponse answer(InvocationOnMock invocation) throws Throwable {
      DescribeSecurityGroupsResponse response = new DescribeSecurityGroupsResponse();
      response.setSecurityGroups(new ArrayList<SecurityGroup>());
      return response;
    }
  }

  /** Answers the create call with a fixed security group id. */
  private class CreateSecurityGroupAnswer implements Answer<CreateSecurityGroupResponse> {
    @Override
    public CreateSecurityGroupResponse answer(InvocationOnMock invocation) throws Throwable {
      CreateSecurityGroupResponse response = new CreateSecurityGroupResponse();
      response.setSecurityGroupId("test-SecurityGroupId");
      return response;
    }
  }

  /** Answers the attribute lookup with no existing permissions. */
  private class DescribeSecurityGroupAttributeAnswer
      implements Answer<DescribeSecurityGroupAttributeResponse> {
    @Override
    public DescribeSecurityGroupAttributeResponse answer(InvocationOnMock invocation)
        throws Throwable {
      DescribeSecurityGroupAttributeResponse response =
          new DescribeSecurityGroupAttributeResponse();
      response.setPermissions(new ArrayList<Permission>());
      return response;
    }
  }

  /** Answers the authorize call with an empty success response. */
  private class AuthorizeSecurityGroupAnswer implements Answer<AuthorizeSecurityGroupResponse> {
    @Override
    public AuthorizeSecurityGroupResponse answer(InvocationOnMock invocation) throws Throwable {
      return new AuthorizeSecurityGroupResponse();
    }
  }
}
| apache-2.0 |
stafur/trustpluscoinj | core/src/main/java/com/google/bitcoin/core/Address.java | 7447 | /**
* Copyright 2011 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.bitcoin.core;
import com.google.bitcoin.params.MainNetParams;
import com.google.bitcoin.params.TestNet3Params;
import com.google.bitcoin.script.Script;
import javax.annotation.Nullable;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
/**
* <p>A Bitcoin address looks like 1MsScoe2fTJoq4ZPdQgqyhgWeoNamYPevy and is derived from an elliptic curve public key
* plus a set of network parameters. Not to be confused with a {@link PeerAddress} or {@link AddressMessage}
* which are about network (TCP) addresses.</p>
*
* <p>A standard address is built by taking the RIPE-MD160 hash of the public key bytes, with a version prefix and a
* checksum suffix, then encoding it textually as base58. The version prefix is used to both denote the network for
* which the address is valid (see {@link NetworkParameters}, and also to indicate how the bytes inside the address
* should be interpreted. Whilst almost all addresses today are hashes of public keys, another (currently unsupported
* type) can contain a hash of a script instead.</p>
*/
public class Address extends VersionedChecksummedBytes {
    /**
     * Number of bytes in the hash160 payload of an address; RIPEMD-160 digests
     * are 160 bits (20 bytes) long.
     */
    public static final int LENGTH = 20;

    /**
     * Builds an address from an explicit version byte and 20-byte hash160
     * payload, validating the version against the given network. Example:<p>
     *
     * <pre>new Address(NetworkParameters.prodNet(), NetworkParameters.getAddressHeader(), Hex.decode("4a22c3c4cbb31e4d03b15550636762bda0baf85a"));</pre>
     */
    public Address(NetworkParameters params, int version, byte[] hash160) throws WrongNetworkException {
        super(version, hash160);
        checkNotNull(params);
        checkArgument(hash160.length == 20, "Addresses are 160-bit hashes, so you must provide 20 bytes");
        if (!isAcceptableVersion(params, version))
            throw new WrongNetworkException(version, params.getAcceptableAddressCodes());
    }

    /** Returns an Address that represents the given P2SH script hash. */
    public static Address fromP2SHHash(NetworkParameters params, byte[] hash160) {
        try {
            return new Address(params, params.getP2SHHeader(), hash160);
        } catch (WrongNetworkException e) {
            // Cannot happen: the version byte comes from params itself.
            throw new RuntimeException(e);
        }
    }

    /** Returns an Address that represents the script hash extracted from the given scriptPubKey. */
    public static Address fromP2SHScript(NetworkParameters params, Script scriptPubKey) {
        checkArgument(scriptPubKey.isPayToScriptHash(), "Not a P2SH script");
        return fromP2SHHash(params, scriptPubKey.getPubKeyHash());
    }

    /**
     * Builds a standard pay-to-pubkey-hash address using the network's default
     * address version byte. Example:<p>
     *
     * <pre>new Address(NetworkParameters.prodNet(), Hex.decode("4a22c3c4cbb31e4d03b15550636762bda0baf85a"));</pre>
     */
    public Address(NetworkParameters params, byte[] hash160) {
        super(params.getAddressHeader(), hash160);
        checkArgument(hash160.length == 20, "Addresses are 160-bit hashes, so you must provide 20 bytes");
    }

    /**
     * Parses an address from its textual (base58) form, optionally validating
     * it against an expected network. Example:<p>
     *
     * <pre>new Address(NetworkParameters.prodNet(), "17kzeh4N8g49GFvdDzSf8PjaPfyoD1MndL");</pre><p>
     *
     * @param params The expected NetworkParameters or null if you don't want validation.
     * @param address The textual form of the address, such as "17kzeh4N8g49GFvdDzSf8PjaPfyoD1MndL"
     * @throws AddressFormatException if the given address doesn't parse or the checksum is invalid
     * @throws WrongNetworkException if the given address is valid but for a different chain (eg testnet vs prodnet)
     */
    public Address(@Nullable NetworkParameters params, String address) throws AddressFormatException {
        super(address);
        // Only validate the decoded version byte when a network was supplied.
        if (params != null && !isAcceptableVersion(params, version))
            throw new WrongNetworkException(version, params.getAcceptableAddressCodes());
    }

    /** The (big endian) 20 byte hash that is the core of a Bitcoin address. */
    public byte[] getHash160() {
        return bytes;
    }

    /*
     * Returns true if this address is a Pay-To-Script-Hash (P2SH) address.
     * See also https://github.com/bitcoin/bips/blob/master/bip-0013.mediawiki: Address Format for pay-to-script-hash
     */
    public boolean isP2SHAddress() {
        NetworkParameters params = getParameters();
        return params != null && version == params.p2shHeader;
    }

    /**
     * Examines the version byte of the address and attempts to find a matching NetworkParameters. If you aren't sure
     * which network the address is intended for (eg, it was provided by a user), you can use this to decide if it is
     * compatible with the current wallet. You should be able to handle a null response from this method. Note that the
     * parameters returned is not necessarily the same as the one the Address was created with.
     *
     * @return a NetworkParameters representing the network the address is intended for, or null if unknown.
     */
    @Nullable
    public NetworkParameters getParameters() {
        // TODO: There should be a more generic way to enumerate supported networks.
        // Test networks (e.g. TestNet3Params) are currently disabled here; only
        // the production network is considered.
        NetworkParameters[] candidates = { MainNetParams.get() };
        for (NetworkParameters candidate : candidates) {
            if (isAcceptableVersion(candidate, version))
                return candidate;
        }
        return null;
    }

    /**
     * Given an address, examines the version byte and attempts to find a matching NetworkParameters. If you aren't sure
     * which network the address is intended for (eg, it was provided by a user), you can use this to decide if it is
     * compatible with the current wallet.
     * @return a NetworkParameters or null if the string wasn't of a known version.
     */
    @Nullable
    public static NetworkParameters getParametersFromAddress(String address) throws AddressFormatException {
        try {
            return new Address(null, address).getParameters();
        } catch (WrongNetworkException e) {
            // Cannot happen: no expected network was supplied.
            throw new RuntimeException(e);
        }
    }

    /**
     * Check if a given address version byte is acceptable on the given network.
     */
    private static boolean isAcceptableVersion(NetworkParameters params, int version) {
        for (int acceptableVersion : params.getAcceptableAddressCodes()) {
            if (acceptableVersion == version)
                return true;
        }
        return false;
    }
}
| apache-2.0 |
porcelli-forks/uberfire-extensions | uberfire-runtime-plugins/uberfire-runtime-plugins-client/src/test/java/org/uberfire/ext/plugin/client/editor/RuntimePluginBaseEditorTest.java | 3822 | /*
* Copyright 2015 JBoss, by Red Hat, Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.uberfire.ext.plugin.client.editor;
import com.google.gwtmockito.GwtMockitoTestRunner;
import org.jboss.errai.common.client.api.Caller;
import org.jboss.errai.common.client.api.RemoteCallback;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Matchers;
import org.uberfire.backend.vfs.ObservablePath;
import org.uberfire.backend.vfs.Path;
import org.uberfire.client.workbench.type.ClientResourceType;
import org.uberfire.ext.plugin.model.Media;
import org.uberfire.ext.plugin.model.PluginContent;
import org.uberfire.ext.plugin.model.PluginSimpleContent;
import org.uberfire.ext.plugin.model.PluginType;
import org.uberfire.ext.plugin.service.PluginServices;
import org.uberfire.mocks.CallerMock;
import org.uberfire.mvp.ParameterizedCommand;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.mockito.Mockito.*;
@RunWith(GwtMockitoTestRunner.class)
@RunWith(GwtMockitoTestRunner.class)
public class RuntimePluginBaseEditorTest {

	// Mocked backend service, wrapped in a synchronous CallerMock below.
	private PluginServices pluginServices;
	private CallerMock<PluginServices> callerMock;

	RemoteCallback<PluginContent> successCallBack;

	// View mock returned lazily by the editor's view() override. Note that the
	// editor is constructed in setup() BEFORE this field is assigned, so the
	// constructor receives null; the anonymous subclass reads this field later.
	RuntimePluginBaseView baseEditorView = null;

	private RuntimePluginBaseEditor editor;

	@Before
	public void setup() {
		pluginServices = mock( PluginServices.class );
		callerMock = new CallerMock<PluginServices>( pluginServices );
		editor = createRuntimePluginBaseEditor();
		successCallBack = mock( RemoteCallback.class );
		baseEditorView = mock( RuntimePluginBaseView.class );
	}

	@Test
	public void loadContentTest() {
		final PluginContent pluginContent = mock( PluginContent.class );
		when( pluginServices.getPluginContent( Matchers.<Path>any() ) ).thenReturn( pluginContent );

		// No content has been loaded yet, so no hash has been recorded.
		assertNull( editor.getOriginalHash() );

		editor.loadContent();

		// The editor must fetch the content exactly once and hand it to the
		// view, then hide the busy indicator.
		verify( pluginServices ).getPluginContent( Matchers.<Path>any() );
		verify( baseEditorView ).setFramework( anyCollection() );
		verify( baseEditorView ).setupContent( eq(pluginContent), Matchers.<ParameterizedCommand<Media>>any() );
		verify( baseEditorView ).hideBusyIndicator();

		// Loading records the original hash -- presumably used later for
		// dirty/modified checking; confirm against RuntimePluginBaseEditor.
		assertNotNull( editor.getOriginalHash() );
	}

	/**
	 * Builds an editor whose collaborators (view, services caller, current
	 * path, content) are all replaced by mocks so loadContent() can run
	 * headless in the test.
	 */
	private RuntimePluginBaseEditor createRuntimePluginBaseEditor() {
		return new RuntimePluginBaseEditor( baseEditorView ) {

			@Override
			protected PluginType getPluginType() {
				return PluginType.DYNAMIC_MENU;
			}

			@Override
			protected ClientResourceType getResourceType() {
				return null;
			}

			@Override
			RuntimePluginBaseView view() {
				return baseEditorView;
			}

			@Override
			Caller<PluginServices> getPluginServices() {
				return callerMock;
			}

			@Override
			ObservablePath getCurrentPath() {
				return mock( ObservablePath.class );
			}

			@Override
			public PluginSimpleContent getContent() {
				return mock( PluginSimpleContent.class );
			}
		};
	}
} | apache-2.0 |
wangsongpeng/jdk-src | src/main/java/java/lang/management/CompilationMXBean.java | 3057 | /*
* Copyright (c) 2003, 2013, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*/
package java.lang.management;
/**
* The management interface for the compilation system of
* the Java virtual machine.
*
* <p> A Java virtual machine has a single instance of the implementation
* class of this interface. This instance implementing this interface is
* an <a href="ManagementFactory.html#MXBean">MXBean</a>
* that can be obtained by calling
* the {@link ManagementFactory#getCompilationMXBean} method or
* from the {@link ManagementFactory#getPlatformMBeanServer
* platform <tt>MBeanServer</tt>} method.
*
* <p>The <tt>ObjectName</tt> for uniquely identifying the MXBean for
* the compilation system within an MBeanServer is:
* <blockquote>
* {@link ManagementFactory#COMPILATION_MXBEAN_NAME
* <tt>java.lang:type=Compilation</tt>}
* </blockquote>
*
* It can be obtained by calling the
* {@link PlatformManagedObject#getObjectName} method.
*
* @see ManagementFactory#getPlatformMXBeans(Class)
* @see <a href="../../../javax/management/package-summary.html">
* JMX Specification.</a>
* @see <a href="package-summary.html#examples">
* Ways to Access MXBeans</a>
*
* @author Mandy Chung
* @since 1.5
*/
public interface CompilationMXBean extends PlatformManagedObject {

    /**
     * Returns the name of the Just-in-time (JIT) compiler.
     *
     * @return the name of the JIT compiler.
     */
    String getName();

    /**
     * Tests if the Java virtual machine supports monitoring of compilation
     * time.
     *
     * @return {@code true} if monitoring of compilation time is supported;
     *         {@code false} otherwise.
     */
    boolean isCompilationTimeMonitoringSupported();

    /**
     * Returns the approximate accumulated elapsed time (in milliseconds)
     * spent in compilation, summed over all compiler threads when more than
     * one is used.
     *
     * <p>This operation is optionally supported: implementations that do not
     * support compilation time monitoring throw
     * {@code UnsupportedOperationException}; call
     * {@link #isCompilationTimeMonitoringSupported()} first to check. The
     * value does not indicate the level of performance of the Java virtual
     * machine and is not intended for performance comparisons between
     * implementations, which may measure compilation time differently.
     *
     * @return compilation time in milliseconds
     * @throws UnsupportedOperationException if the Java virtual machine does
     *         not support this operation.
     */
    long getTotalCompilationTime();
}
| apache-2.0 |
jay-hodgson/SynapseWebClient | src/main/java/org/sagebionetworks/web/client/widget/entity/SharingAndDataUseConditionWidgetView.java | 348 | package org.sagebionetworks.web.client.widget.entity;
import org.sagebionetworks.repo.model.entitybundle.v2.EntityBundle;
import org.sagebionetworks.web.client.SynapseView;
import com.google.gwt.user.client.ui.IsWidget;
// View contract for the sharing / data-use-condition widget. Combines the GWT
// IsWidget contract with the application's common SynapseView behavior.
public interface SharingAndDataUseConditionWidgetView extends IsWidget, SynapseView {
	/**
	 * Configures the view with the given entity bundle -- presumably the
	 * bundle whose sharing settings and data-use conditions should be
	 * rendered; confirm against the implementing view class.
	 *
	 * @param bundle the entity bundle to display
	 */
	void configure(EntityBundle bundle);
}
| apache-2.0 |
bradwoo8621/nest-old | arcteryx-meta/src/main/java/com/github/nest/arcteryx/meta/internal/AbstractStaticCodeResourceOperator.java | 1467 | /**
*
*/
package com.github.nest.arcteryx.meta.internal;
import com.github.nest.arcteryx.meta.IResourceDescriptor;
import com.github.nest.arcteryx.meta.IResourceOperator;
/**
* static code resource operator. code of operator will be created by
* {@linkplain #createCode()} and cannot be changed.
*
* @author brad.wu
*/
public abstract class AbstractStaticCodeResourceOperator implements IResourceOperator {
	// Lazily initialized by getCode(); never changes once created.
	private String code = null;

	private IResourceDescriptor resourceDescriptor = null;

	public AbstractStaticCodeResourceOperator() {
		// Intentionally empty. The previous version invoked the overridable
		// createCode() from this constructor, which ran subclass code before
		// the subclass's own fields were initialized (calling overridable
		// methods from a constructor is a classic source of subtle bugs).
		// The code is now created lazily on the first call to getCode().
	}

	/**
	 * Create the code of this operator. Invoked on the first call to
	 * {@link #getCode()}; the returned value is cached and never changes
	 * afterwards.
	 *
	 * @return the operator code
	 */
	protected abstract String createCode();

	/**
	 * Returns the static code of this operator, creating it on first access
	 * via {@link #createCode()}.
	 *
	 * (non-Javadoc)
	 *
	 * @see com.github.nest.arcteryx.meta.IResourceOperator#getCode()
	 */
	@Override
	public final String getCode() {
		if (this.code == null) {
			this.code = createCode();
		}
		return this.code;
	}

	/**
	 * (non-Javadoc)
	 *
	 * @see com.github.nest.arcteryx.meta.IResourceOperator#setResourceDescriptor(com.github.nest.arcteryx.meta.IResourceDescriptor)
	 */
	@Override
	public void setResourceDescriptor(IResourceDescriptor resourceDescriptor) {
		this.resourceDescriptor = resourceDescriptor;
	}

	/**
	 * (non-Javadoc)
	 *
	 * @see com.github.nest.arcteryx.meta.IResourceOperator#getResourceDescriptor()
	 */
	@SuppressWarnings("unchecked")
	@Override
	public <T extends IResourceDescriptor> T getResourceDescriptor() {
		return (T) this.resourceDescriptor;
	}
}
| apache-2.0 |
jpw-erigo/cloudturbine | JavaCode/CTaudio/src/main/java/ctaudio/CTaudio.java | 6016 |
/*
Copyright 2018 Cycronix
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package ctaudio;
import javax.swing.*;
import cycronix.ctlib.CTinfo;
import cycronix.ctlib.CTwriter;
import java.awt.*;
import java.awt.event.*;
import java.io.*;
import javax.sound.sampled.*;
/**
* CloudTurbine Audio:
* Record system audio to CT
* <p>
* @author Matt Miller (MJM), Cycronix
* @version 01/16/2017
*
*/
public class CTaudio extends JFrame {
	// Flag flipped by the Stop button; polled by the capture thread.
	protected boolean running;
	// Unused legacy buffer (the old in-memory capture path); retained because
	// it is package-visible -- TODO confirm nothing external references it.
	ByteArrayOutputStream out;
	// Sample rate in Hz (typical values: 8000, 22050, 44100).
	int frequency = 8000;

	/**
	 * Builds the two-button (Capture / Stop) Swing UI.
	 */
	public CTaudio() {
		super("Capture Sound Demo");
		setDefaultCloseOperation(EXIT_ON_CLOSE);
		Container content = getContentPane();
		final JButton capture = new JButton("Capture");
		final JButton stop = new JButton("Stop");
		capture.setEnabled(true);
		stop.setEnabled(false);

		ActionListener captureListener = new ActionListener() {
			public void actionPerformed(ActionEvent e) {
				capture.setEnabled(false);
				stop.setEnabled(true);
				captureAudio();
			}
		};
		capture.addActionListener(captureListener);
		content.add(capture, BorderLayout.NORTH);

		ActionListener stopListener = new ActionListener() {
			public void actionPerformed(ActionEvent e) {
				capture.setEnabled(true);
				stop.setEnabled(false);
				running = false;
			}
		};
		stop.addActionListener(stopListener);
		content.add(stop, BorderLayout.CENTER);
	}

	/**
	 * Opens the system capture line and starts a background thread that writes
	 * one WAV-wrapped block per ~1 second of audio to CloudTurbine.
	 */
	private void captureAudio() {
		try {
			CTwriter ctw = new CTwriter("CTdata/CTaudio");
			CTinfo.setDebug(false);
			ctw.setBlockMode(true, true); // pack, zip
			ctw.autoFlush(0); // no autoflush

			final AudioFormat format = getFormat();
			DataLine.Info info = new DataLine.Info(TargetDataLine.class, format);
			final TargetDataLine line = (TargetDataLine) AudioSystem.getLine(info);
			line.open(format);
			line.start();

			Runnable runner = new Runnable() {
				// One second worth of bytes at the configured format.
				int bufferSize = (int) format.getSampleRate() * format.getFrameSize();
				byte buffer[] = new byte[bufferSize];
				long fcount = 0;
				long startTime = System.currentTimeMillis();

				public void run() {
					running = true;
					try {
						while (running) {
							int count = line.read(buffer, 0, buffer.length);
							if (count > 0) {
								// force exactly 1sec intervals? (gapless but drift?)
								ctw.setTime(startTime + fcount * 1000);
								fcount++;
								// Only the bytes actually read belong in this
								// block. The old code always wrote the whole
								// buffer, padding short reads with stale bytes
								// and mis-sizing the WAV header.
								byte[] chunk = (count == buffer.length) ? buffer
										: java.util.Arrays.copyOf(buffer, count);
								ctw.putData("audio.wav", addWaveHeader(chunk));
								ctw.flush();
							}
						}
					} catch (Exception e) {
						System.err.println("I/O problems: " + e);
						System.exit(-1);
					}
				}
			};
			Thread captureThread = new Thread(runner);
			captureThread.start();
		}
		catch (LineUnavailableException e) {
			System.err.println("Line unavailable: " + e);
			System.exit(-2);
		}
		catch (Exception e) {
			System.err.println("CT error: " + e);
			// Was System.exit(ABORT): ABORT is inherited from ImageObserver
			// (value 128) and was never meant as an exit status.
			System.exit(-3);
		}
	}

	/**
	 * Returns the capture format: 16-bit signed little-endian mono PCM at
	 * {@link #frequency} Hz.
	 */
	private AudioFormat getFormat() {
		float sampleRate = frequency;
		int sampleSizeInBits = 16;
		int channels = 1;
		boolean signed = true;
		boolean bigEndian = false;
		return new AudioFormat(sampleRate, sampleSizeInBits, channels, signed, bigEndian);
	}

	public static void main(String args[]) {
		JFrame frame = new CTaudio();
		frame.pack();
		// setVisible replaces the deprecated Frame.show().
		frame.setVisible(true);
	}

	/**
	 * Prepends a canonical 44-byte RIFF/WAVE header (16-bit mono PCM at
	 * {@link #frequency} Hz) to the given raw sample bytes.
	 *
	 * @param dataBuffer raw PCM sample bytes
	 * @return a new array containing header followed by the samples
	 */
	private byte[] addWaveHeader(byte[] dataBuffer) throws IOException {
		byte RECORDER_BPP = 16; // bits per sample
		long totalAudioLen = dataBuffer.length;
		long totalDataLen = totalAudioLen + 36;
		long longSampleRate = frequency;
		int channels = 1;
		long byteRate = RECORDER_BPP * frequency * channels / 8;

		byte[] header = new byte[44];
		header[0] = 'R'; // RIFF/WAVE header
		header[1] = 'I';
		header[2] = 'F';
		header[3] = 'F';
		header[4] = (byte) (totalDataLen & 0xff);
		header[5] = (byte) ((totalDataLen >> 8) & 0xff);
		header[6] = (byte) ((totalDataLen >> 16) & 0xff);
		header[7] = (byte) ((totalDataLen >> 24) & 0xff);
		header[8] = 'W';
		header[9] = 'A';
		header[10] = 'V';
		header[11] = 'E';
		header[12] = 'f'; // 'fmt ' chunk
		header[13] = 'm';
		header[14] = 't';
		header[15] = ' ';
		header[16] = 16; // 4 bytes: size of 'fmt ' chunk
		header[17] = 0;
		header[18] = 0;
		header[19] = 0;
		header[20] = 1; // format = 1 (PCM)
		header[21] = 0;
		header[22] = (byte) channels;
		header[23] = 0;
		header[24] = (byte) (longSampleRate & 0xff);
		header[25] = (byte) ((longSampleRate >> 8) & 0xff);
		header[26] = (byte) ((longSampleRate >> 16) & 0xff);
		header[27] = (byte) ((longSampleRate >> 24) & 0xff);
		header[28] = (byte) (byteRate & 0xff);
		header[29] = (byte) ((byteRate >> 8) & 0xff);
		header[30] = (byte) ((byteRate >> 16) & 0xff);
		header[31] = (byte) ((byteRate >> 24) & 0xff);
		header[32] = (byte) (channels * 16 / 8); // block align
		header[33] = 0;
		header[34] = RECORDER_BPP; // bits per sample
		header[35] = 0;
		header[36] = 'd';
		header[37] = 'a';
		header[38] = 't';
		header[39] = 'a';
		header[40] = (byte) (totalAudioLen & 0xff);
		header[41] = (byte) ((totalAudioLen >> 8) & 0xff);
		header[42] = (byte) ((totalAudioLen >> 16) & 0xff);
		header[43] = (byte) ((totalAudioLen >> 24) & 0xff);

		byte[] waveBuffer = new byte[header.length + dataBuffer.length];
		System.arraycopy(header, 0, waveBuffer, 0, header.length);
		System.arraycopy(dataBuffer, 0, waveBuffer, header.length, dataBuffer.length);
		return waveBuffer;
	}
}
| apache-2.0 |
darciopacifico/omr | modulesOMR/trunk/JazzFramework/src/main/java/br/com/dlp/framework/jsf/AbstractPaginatedJSFBeanImpl.java | 6161 | /**
*
*/
package br.com.dlp.framework.jsf;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import javax.faces.model.SelectItem;
import org.apache.commons.collections.CollectionUtils;
import br.com.dlp.framework.dao.ExtraArgumentsDTO;
import br.com.dlp.framework.exception.JazzBusinessException;
import br.com.dlp.framework.vo.IBaseVO;
/**
 * Abstract JSF backing bean with pagination support, backed by a
 * {@link JazzDataModel} (RichFaces ExtendedDataModel specialization).
 * <p>
 * Both the search results and the total row count are cached; by design the
 * cache is valid until the search arguments change, at which point
 * {@link #invalidateRowCountCache()} must be called.
 * <p>
 * TODO: fix the editable-tables problem (QuestionnaireCRUD), where sub-module
 * fields do not update the bean unless submitted via ajax:support.
 *
 * @author dpacifico
 * @param <B> the VO type handled by this bean
 */
public abstract class AbstractPaginatedJSFBeanImpl<B extends IBaseVO<? extends Serializable>> extends AbstractJSFBeanImpl<B> implements IJazzDataProvider<B> {

	private static final long serialVersionUID = -5770083259678219532L;

	// Cached search results; cleared whenever the row-count cache is invalidated.
	private List<B> resultadosPesquisa = new ArrayList<B>(0);

	/**
	 * Executes the search against the database.
	 * @param extraArgumentsDTO pagination/sorting arguments
	 * @return the resulting page of records
	 */
	public abstract List<B> actionPesquisar(ExtraArgumentsDTO extraArgumentsDTO);

	/**
	 * Returns the cached results when the cache is valid; otherwise re-runs the
	 * database search and refreshes the cache.
	 */
	public final List<B> actionPesquisarCached(ExtraArgumentsDTO extraArgumentsDTO) {
		if (!isValidRowCountCache()) {
			this.resultadosPesquisa = actionPesquisar(extraArgumentsDTO);
		}
		return resultadosPesquisa;
	}

	// Pagination data model instance, created lazily by getDataModel().
	private JazzDataModel<B> jazzDataModel;

	// Options for the rows-per-page selector, created lazily.
	private Collection<SelectItem> opcoesQtdLinhas;

	private Integer linhasPorPagina = 10;

	// Cached total row count; null means "not computed yet".
	private Long linhasTotais = null;

	/**
	 * Overridden only to invalidate the row-count cache before saving.
	 * @see br.com.dlp.framework.jsf.AbstractJSFBeanImpl#actionSalvar()
	 */
	@Override
	public String actionSalvar() throws JazzBusinessException {
		invalidateRowCountCache();
		return super.actionSalvar();
	}

	/**
	 * Pass-through override. Note that the row-count cache is NOT invalidated
	 * here: deletion only happens once it is confirmed, so the cache is
	 * invalidated in {@link #deleteConfirm()} instead.
	 * @see br.com.dlp.framework.jsf.AbstractJSFBeanImpl#actionDelete(br.com.dlp.framework.vo.IBaseVO)
	 */
	@Override
	public String actionDelete(final B voBean) {
		return super.actionDelete(voBean);
	}

	/**
	 * Confirms deletion of the selected record, invalidating the row-count
	 * cache first.
	 * @see br.com.dlp.framework.jsf.AbstractJSFBeanImpl#deleteConfirm()
	 */
	@Override
	public String deleteConfirm() {
		invalidateRowCountCache();
		return super.deleteConfirm();
	}

	/**
	 * Returns the cached row count, recomputing it through {@link #rowCount()}
	 * only when the cache has been invalidated.
	 * @see br.com.dlp.framework.jsf.IJazzDataProvider#cachedRowCount()
	 */
	@Override
	public Long cachedRowCount() {
		if (!isValidRowCountCache()) {
			this.linhasTotais = rowCount();
		}
		return this.linhasTotais;
	}

	/**
	 * Tells whether the row-count cache is valid: a count has been computed and
	 * there are cached results.
	 */
	@Override
	public boolean isValidRowCountCache() {
		return linhasTotais != null && CollectionUtils.isNotEmpty(this.resultadosPesquisa);
	}

	/**
	 * Invalidates the last cached row count and the cached results.
	 * In the proposed design the cache is valid until the search arguments
	 * change; call this method whenever they do.
	 * @return always {@code null} (stay on the current view)
	 */
	public String invalidateRowCountCache() {
		linhasTotais = null;
		this.resultadosPesquisa = null;
		return null;
	}

	/**
	 * Counts the records matched by the query. The value is kept in cache until
	 * the cache is invalidated (i.e. until the search arguments change).
	 */
	protected abstract Long rowCount();

	/* (non-Javadoc)
	 * @see br.com.dlp.framework.jsf.IJazzDataProvider#isRowAvailable(java.io.Serializable)
	 */
	@Override
	public boolean isRowAvailable(Serializable currentPk) {
		return currentPk != null;
	}

	/**
	 * Returns the RichFaces ExtendedDataModel specialization, creating it on
	 * first access.
	 * @return the {@link JazzDataModel} backing this bean
	 */
	public JazzDataModel<B> getDataModel() {
		if (this.jazzDataModel == null) {
			this.jazzDataModel = new JazzDataModel<B>(this);
		}
		return this.jazzDataModel;
	}

	@Override
	public List<B> getResultados() {
		return getDataModel().getResultados();
	}

	/**
	 * Returns the rows-per-page options for search results, creating the list
	 * on first access.
	 */
	public Collection<SelectItem> getOpcoesQtdLinhas() {
		if (opcoesQtdLinhas == null) {
			opcoesQtdLinhas = new ArrayList<SelectItem>();
			opcoesQtdLinhas.add(new SelectItem(5, "5"));
			opcoesQtdLinhas.add(new SelectItem(10, "10"));
			opcoesQtdLinhas.add(new SelectItem(15, "15"));
			opcoesQtdLinhas.add(new SelectItem(25, "25"));
			opcoesQtdLinhas.add(new SelectItem(40, "40"));
		}
		return opcoesQtdLinhas;
	}

	public void setOpcoesQtdLinhas(Collection<SelectItem> opcoesQtdLinhas) {
		this.opcoesQtdLinhas = opcoesQtdLinhas;
	}

	public Integer getLinhasPorPagina() {
		return this.linhasPorPagina;
	}

	public void setLinhasPorPagina(Integer linhasPorPagina) {
		this.linhasPorPagina = linhasPorPagina;
	}

	/**
	 * Direct use of actionPesquisar is not supported on
	 * AbstractPaginatedJSFBeanImpl because it would break the pagination
	 * mechanism; use the {@link JazzDataModel}-based search instead.
	 */
	@Override
	public String actionPesquisar() {
		if (log.isDebugEnabled()) {
			log.debug("O uso do método actionPesquisar em AbstractPaginatedJSFBeanImpl"+
					" não é autorizada para não quebrar o mecanismo de paginação."+
					" Verifique a solução de design aplicável para pesquisa utilizando JazzDataModel!");
		}
		return null;
	}

	/**
	 * Clears the module's search criteria, invalidating the row-count cache
	 * first (so the next render recomputes count and results).
	 * @return the outcome of the superclass action
	 */
	@Override
	public String actionLimparPesquisa() {
		invalidateRowCountCache();
		return super.actionLimparPesquisa();
	}

	public Long getLinhasTotais() {
		return linhasTotais;
	}

	public void setLinhasTotais(Long linhasTotais) {
		this.linhasTotais = linhasTotais;
	}
}
| apache-2.0 |
PATRIC3/p3_solr | lucene/misc/src/test/org/apache/lucene/search/TestEarlyTerminatingSortingCollector.java | 13425 | package org.apache.lucene.search;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Random;
import java.util.Set;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.ExitableDirectoryReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.QueryTimeout;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.SerialMergeScheduler;
import org.apache.lucene.index.SortingMergePolicy;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TestSortingMergePolicy;
import org.apache.lucene.search.LeafCollector;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopFieldCollector;
import org.apache.lucene.store.Directory;
import org.apache.lucene.uninverting.UninvertingReader;
import org.apache.lucene.uninverting.UninvertingReader.Type;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
/**
 * Tests for {@code EarlyTerminatingSortingCollector}: verifies that collecting
 * over an index whose segments were sorted by a {@link SortingMergePolicy}
 * produces the same top hits as a plain collector, and that collection is
 * actually terminated early when the index sort matches the query sort.
 */
public class TestEarlyTerminatingSortingCollector extends LuceneTestCase {

  private int numDocs;
  private List<String> terms;
  private Directory dir;
  private Sort sort;
  private RandomIndexWriter iw;
  private IndexReader reader;
  private SortingMergePolicy mergePolicy;
  private final int forceMergeMaxSegmentCount = 5;

  @Override
  public void setUp() throws Exception {
    super.setUp();
    // Index sort used by the SortingMergePolicy in createRandomIndex().
    sort = new Sort(new SortField("ndv1", SortField.Type.LONG));
  }

  // Builds a document with two random numeric doc values and a random term.
  private Document randomDocument() {
    final Document doc = new Document();
    doc.add(new NumericDocValuesField("ndv1", random().nextInt(10)));
    doc.add(new NumericDocValuesField("ndv2", random().nextInt(10)));
    doc.add(new StringField("s", RandomPicks.randomFrom(random(), terms), Store.YES));
    return doc;
  }

  /**
   * Populates {@link #dir}/{@link #reader} with a random index sorted by
   * {@link #sort}, with random commits and deletions along the way.
   * @param singleSortedSegment when true, force-merge down to a single
   *        (sorted) segment
   */
  private void createRandomIndex(boolean singleSortedSegment) throws IOException {
    dir = newDirectory();
    numDocs = atLeast(150);
    final int numTerms = TestUtil.nextInt(random(), 1, numDocs / 5);
    Set<String> randomTerms = new HashSet<>();
    while (randomTerms.size() < numTerms) {
      randomTerms.add(TestUtil.randomSimpleString(random()));
    }
    terms = new ArrayList<>(randomTerms);
    final long seed = random().nextLong();
    final IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(new Random(seed)));
    iwc.setMergeScheduler(new SerialMergeScheduler()); // for reproducible tests
    mergePolicy = TestSortingMergePolicy.newSortingMergePolicy(sort);
    iwc.setMergePolicy(mergePolicy);
    iw = new RandomIndexWriter(new Random(seed), dir, iwc);
    iw.setDoRandomForceMerge(false); // don't do this, it may happen anyway with MockRandomMP
    for (int i = 0; i < numDocs; ++i) {
      final Document doc = randomDocument();
      iw.addDocument(doc);
      if (i == numDocs / 2 || (i != numDocs - 1 && random().nextInt(8) == 0)) {
        iw.commit();
      }
      if (random().nextInt(15) == 0) {
        final String term = RandomPicks.randomFrom(random(), terms);
        iw.deleteDocuments(new Term("s", term));
      }
    }
    if (singleSortedSegment) {
      // because of deletions, there might still be a single flush segment in
      // the index, although we want a sorted segment, so it needs to be merged
      iw.getReader().close(); // refresh
      iw.addDocument(new Document());
      iw.commit();
      iw.addDocument(new Document());
      iw.forceMerge(1);
    }
    else if (random().nextBoolean()) {
      iw.forceMerge(forceMergeMaxSegmentCount);
    }
    reader = iw.getReader();
  }

  // Releases everything opened by createRandomIndex().
  private void closeIndex() throws IOException {
    reader.close();
    iw.close();
    dir.close();
  }

  /**
   * The early-terminating collector must return the same top docs as a plain
   * TopFieldCollector when the query sort matches the index sort.
   */
  public void testEarlyTermination() throws IOException {
    final int iters = atLeast(8);
    for (int i = 0; i < iters; ++i) {
      createRandomIndex(false);
      for (int j = 0; j < iters; ++j) {
        final IndexSearcher searcher = newSearcher(reader);
        final int numHits = TestUtil.nextInt(random(), 1, numDocs);
        final Sort sort = new Sort(new SortField("ndv1", SortField.Type.LONG, false));
        final boolean fillFields = random().nextBoolean();
        final boolean trackDocScores = random().nextBoolean();
        final boolean trackMaxScore = random().nextBoolean();
        final TopFieldCollector collector1 = TopFieldCollector.create(sort, numHits, fillFields, trackDocScores, trackMaxScore);
        final TopFieldCollector collector2 = TopFieldCollector.create(sort, numHits, fillFields, trackDocScores, trackMaxScore);
        final Query query;
        if (random().nextBoolean()) {
          query = new TermQuery(new Term("s", RandomPicks.randomFrom(random(), terms)));
        } else {
          query = new MatchAllDocsQuery();
        }
        searcher.search(query, collector1);
        searcher.search(query, new EarlyTerminatingSortingCollector(collector2, sort, numHits, mergePolicy.getSort()));
        // early termination may only reduce the number of hits visited
        assertTrue(collector1.getTotalHits() >= collector2.getTotalHits());
        assertTopDocsEquals(collector1.topDocs().scoreDocs, collector2.topDocs().scoreDocs);
      }
      closeIndex();
    }
  }

  /**
   * canEarlyTerminate: true iff the query sort is a prefix of the index sort
   * (same fields, same directions, in the same order).
   */
  public void testCanEarlyTerminate() {
    assertTrue(EarlyTerminatingSortingCollector.canEarlyTerminate(
        new Sort(new SortField("a", SortField.Type.LONG)),
        new Sort(new SortField("a", SortField.Type.LONG))));

    assertTrue(EarlyTerminatingSortingCollector.canEarlyTerminate(
        new Sort(new SortField("a", SortField.Type.LONG), new SortField("b", SortField.Type.STRING)),
        new Sort(new SortField("a", SortField.Type.LONG), new SortField("b", SortField.Type.STRING))));

    assertTrue(EarlyTerminatingSortingCollector.canEarlyTerminate(
        new Sort(new SortField("a", SortField.Type.LONG)),
        new Sort(new SortField("a", SortField.Type.LONG), new SortField("b", SortField.Type.STRING))));

    // reversed direction: not a prefix
    assertFalse(EarlyTerminatingSortingCollector.canEarlyTerminate(
        new Sort(new SortField("a", SortField.Type.LONG, true)),
        new Sort(new SortField("a", SortField.Type.LONG, false))));

    // query sort longer than index sort: not a prefix
    assertFalse(EarlyTerminatingSortingCollector.canEarlyTerminate(
        new Sort(new SortField("a", SortField.Type.LONG), new SortField("b", SortField.Type.STRING)),
        new Sort(new SortField("a", SortField.Type.LONG))));

    assertFalse(EarlyTerminatingSortingCollector.canEarlyTerminate(
        new Sort(new SortField("a", SortField.Type.LONG), new SortField("b", SortField.Type.STRING)),
        new Sort(new SortField("a", SortField.Type.LONG), new SortField("c", SortField.Type.STRING))));

    assertFalse(EarlyTerminatingSortingCollector.canEarlyTerminate(
        new Sort(new SortField("a", SortField.Type.LONG), new SortField("b", SortField.Type.STRING)),
        new Sort(new SortField("c", SortField.Type.LONG), new SortField("b", SortField.Type.STRING))));
  }

  public void testEarlyTerminationDifferentSorter() throws IOException {
    createRandomIndex(false);
    final int iters = atLeast(3);
    for (int i = 0; i < iters; ++i) {
      final IndexSearcher searcher = newSearcher(reader);
      // test that the collector works correctly when the index was sorted by a
      // different sorter than the one specified in the ctor.
      final int numHits = TestUtil.nextInt(random(), 1, numDocs);
      final Sort sort = new Sort(new SortField("ndv2", SortField.Type.LONG, false));
      final boolean fillFields = random().nextBoolean();
      final boolean trackDocScores = random().nextBoolean();
      final boolean trackMaxScore = random().nextBoolean();
      final TopFieldCollector collector1 = TopFieldCollector.create(sort, numHits, fillFields, trackDocScores, trackMaxScore);
      final TopFieldCollector collector2 = TopFieldCollector.create(sort, numHits, fillFields, trackDocScores, trackMaxScore);
      final Query query;
      if (random().nextBoolean()) {
        query = new TermQuery(new Term("s", RandomPicks.randomFrom(random(), terms)));
      } else {
        query = new MatchAllDocsQuery();
      }
      searcher.search(query, collector1);
      Sort different = new Sort(new SortField("ndv2", SortField.Type.LONG));
      searcher.search(query, new EarlyTerminatingSortingCollector(collector2, different, numHits, different) {
        @Override
        public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException {
          final LeafCollector ret = super.getLeafCollector(context);
          assertTrue("segment should not be recognized as sorted as different sorter was used", ret.getClass() == in.getLeafCollector(context).getClass());
          return ret;
        }
      });
      assertTrue(collector1.getTotalHits() >= collector2.getTotalHits());
      assertTopDocsEquals(collector1.topDocs().scoreDocs, collector2.topDocs().scoreDocs);
    }
    closeIndex();
  }

  // Asserts both arrays contain the same docs with (approximately) equal scores.
  private static void assertTopDocsEquals(ScoreDoc[] scoreDocs1, ScoreDoc[] scoreDocs2) {
    assertEquals(scoreDocs1.length, scoreDocs2.length);
    for (int i = 0; i < scoreDocs1.length; ++i) {
      final ScoreDoc scoreDoc1 = scoreDocs1[i];
      final ScoreDoc scoreDoc2 = scoreDocs2[i];
      assertEquals(scoreDoc1.doc, scoreDoc2.doc);
      assertEquals(scoreDoc1.score, scoreDoc2.score, 0.001f);
    }
  }

  // Collector that only records whether collect() was ever called.
  private class TestTerminatedEarlySimpleCollector extends SimpleCollector {
    private boolean collectedSomething;
    public boolean collectedSomething() {
      return collectedSomething;
    }
    @Override
    public void collect(int doc) throws IOException {
      collectedSomething = true;
    }
    @Override
    public boolean needsScores() {
      return false;
    }
  }

  // Fixed-answer QueryTimeout used to wrap the reader in an ExitableDirectoryReader.
  private class TestEarlyTerminatingSortingcollectorQueryTimeout implements QueryTimeout {
    final private boolean shouldExit;
    public TestEarlyTerminatingSortingcollectorQueryTimeout(boolean shouldExit) {
      this.shouldExit = shouldExit;
    }
    public boolean shouldExit() {
      return shouldExit;
    }
  }

  // Randomly returns either a plain searcher or one over an exitable/uninverting
  // wrapped reader, to exercise early termination through reader wrappers too.
  private IndexSearcher newSearcherForTestTerminatedEarly(IndexReader r) throws IOException {
    switch(random().nextInt(2)) {
      case 0:
        return new IndexSearcher(r);
      case 1:
        assertTrue(r+" is not a DirectoryReader", (r instanceof DirectoryReader));
        final DirectoryReader directoryReader = ExitableDirectoryReader.wrap(
            UninvertingReader.wrap((DirectoryReader) r, new HashMap<String,Type>()),
            new TestEarlyTerminatingSortingcollectorQueryTimeout(false));
        return new IndexSearcher(directoryReader);
    }
    fail("newSearcherForTestTerminatedEarly("+r+") fell through switch");
    return null;
  }

  /**
   * With a single sorted segment and numDocsToCollect == 1, the collector must
   * report terminatedEarly() whenever it collected anything at all.
   */
  public void testTerminatedEarly() throws IOException {
    final int iters = atLeast(8);
    for (int i = 0; i < iters; ++i) {
      createRandomIndex(true);
      final IndexSearcher searcher = newSearcherForTestTerminatedEarly(reader); // future TODO: use newSearcher(reader);
      final Query query = new MatchAllDocsQuery(); // search for everything/anything
      final TestTerminatedEarlySimpleCollector collector1 = new TestTerminatedEarlySimpleCollector();
      searcher.search(query, collector1);
      final TestTerminatedEarlySimpleCollector collector2 = new TestTerminatedEarlySimpleCollector();
      final EarlyTerminatingSortingCollector etsCollector = new EarlyTerminatingSortingCollector(collector2, sort, 1, mergePolicy.getSort());
      searcher.search(query, etsCollector);
      assertTrue("collector1="+collector1.collectedSomething()+" vs. collector2="+collector2.collectedSomething(), collector1.collectedSomething() == collector2.collectedSomething());
      if (collector1.collectedSomething()) {
        // we collected something and since we modestly asked for just one document we should have terminated early
        assertTrue("should have terminated early (searcher.reader="+searcher.reader+")", etsCollector.terminatedEarly());
      }
      closeIndex();
    }
  }
}
| apache-2.0 |
lburgazzoli/spring-boot | spring-boot-project/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/web/servlet/WebMvcAutoConfiguration.java | 27395 | /*
* Copyright 2012-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.autoconfigure.web.servlet;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.Optional;
import javax.servlet.Servlet;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.BeanFactory;
import org.springframework.beans.factory.ListableBeanFactory;
import org.springframework.beans.factory.NoSuchBeanDefinitionException;
import org.springframework.beans.factory.ObjectProvider;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.AutoConfigureAfter;
import org.springframework.boot.autoconfigure.AutoConfigureOrder;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.autoconfigure.condition.ConditionalOnWebApplication;
import org.springframework.boot.autoconfigure.condition.ConditionalOnWebApplication.Type;
import org.springframework.boot.autoconfigure.http.HttpMessageConverters;
import org.springframework.boot.autoconfigure.task.TaskExecutionAutoConfiguration;
import org.springframework.boot.autoconfigure.template.TemplateAvailabilityProviders;
import org.springframework.boot.autoconfigure.validation.ValidationAutoConfiguration;
import org.springframework.boot.autoconfigure.validation.ValidatorAdapter;
import org.springframework.boot.autoconfigure.web.ConditionalOnEnabledResourceChain;
import org.springframework.boot.autoconfigure.web.ResourceProperties;
import org.springframework.boot.autoconfigure.web.ResourceProperties.Strategy;
import org.springframework.boot.autoconfigure.web.format.WebConversionService;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.boot.web.servlet.filter.OrderedFormContentFilter;
import org.springframework.boot.web.servlet.filter.OrderedHiddenHttpMethodFilter;
import org.springframework.boot.web.servlet.filter.OrderedRequestContextFilter;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ResourceLoaderAware;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.Primary;
import org.springframework.core.Ordered;
import org.springframework.core.annotation.Order;
import org.springframework.core.convert.converter.Converter;
import org.springframework.core.convert.converter.GenericConverter;
import org.springframework.core.io.ClassPathResource;
import org.springframework.core.io.Resource;
import org.springframework.core.io.ResourceLoader;
import org.springframework.core.task.AsyncTaskExecutor;
import org.springframework.format.Formatter;
import org.springframework.format.FormatterRegistry;
import org.springframework.format.support.FormattingConversionService;
import org.springframework.http.CacheControl;
import org.springframework.http.MediaType;
import org.springframework.http.converter.HttpMessageConverter;
import org.springframework.util.ClassUtils;
import org.springframework.validation.DefaultMessageCodesResolver;
import org.springframework.validation.MessageCodesResolver;
import org.springframework.validation.Validator;
import org.springframework.web.HttpMediaTypeNotAcceptableException;
import org.springframework.web.accept.ContentNegotiationManager;
import org.springframework.web.accept.ContentNegotiationStrategy;
import org.springframework.web.accept.PathExtensionContentNegotiationStrategy;
import org.springframework.web.bind.support.ConfigurableWebBindingInitializer;
import org.springframework.web.context.request.NativeWebRequest;
import org.springframework.web.context.request.RequestAttributes;
import org.springframework.web.context.request.RequestContextListener;
import org.springframework.web.filter.FormContentFilter;
import org.springframework.web.filter.HiddenHttpMethodFilter;
import org.springframework.web.filter.RequestContextFilter;
import org.springframework.web.servlet.DispatcherServlet;
import org.springframework.web.servlet.HandlerExceptionResolver;
import org.springframework.web.servlet.LocaleResolver;
import org.springframework.web.servlet.View;
import org.springframework.web.servlet.ViewResolver;
import org.springframework.web.servlet.config.annotation.AsyncSupportConfigurer;
import org.springframework.web.servlet.config.annotation.ContentNegotiationConfigurer;
import org.springframework.web.servlet.config.annotation.DelegatingWebMvcConfiguration;
import org.springframework.web.servlet.config.annotation.EnableWebMvc;
import org.springframework.web.servlet.config.annotation.PathMatchConfigurer;
import org.springframework.web.servlet.config.annotation.ResourceChainRegistration;
import org.springframework.web.servlet.config.annotation.ResourceHandlerRegistration;
import org.springframework.web.servlet.config.annotation.ResourceHandlerRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurationSupport;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
import org.springframework.web.servlet.handler.AbstractHandlerExceptionResolver;
import org.springframework.web.servlet.handler.SimpleUrlHandlerMapping;
import org.springframework.web.servlet.i18n.AcceptHeaderLocaleResolver;
import org.springframework.web.servlet.i18n.FixedLocaleResolver;
import org.springframework.web.servlet.mvc.method.annotation.ExceptionHandlerExceptionResolver;
import org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerAdapter;
import org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerMapping;
import org.springframework.web.servlet.resource.AppCacheManifestTransformer;
import org.springframework.web.servlet.resource.EncodedResourceResolver;
import org.springframework.web.servlet.resource.ResourceHttpRequestHandler;
import org.springframework.web.servlet.resource.ResourceResolver;
import org.springframework.web.servlet.resource.ResourceUrlProvider;
import org.springframework.web.servlet.resource.VersionResourceResolver;
import org.springframework.web.servlet.view.BeanNameViewResolver;
import org.springframework.web.servlet.view.ContentNegotiatingViewResolver;
import org.springframework.web.servlet.view.InternalResourceViewResolver;
/**
* {@link EnableAutoConfiguration Auto-configuration} for {@link EnableWebMvc Web MVC}.
*
* @author Phillip Webb
* @author Dave Syer
* @author Andy Wilkinson
* @author Sébastien Deleuze
* @author Eddú Meléndez
* @author Stephane Nicoll
* @author Kristine Jetzke
* @author Bruce Brouwer
* @author Artsiom Yudovin
*/
@Configuration(proxyBeanMethods = false)
@ConditionalOnWebApplication(type = Type.SERVLET)
@ConditionalOnClass({ Servlet.class, DispatcherServlet.class, WebMvcConfigurer.class })
@ConditionalOnMissingBean(WebMvcConfigurationSupport.class)
@AutoConfigureOrder(Ordered.HIGHEST_PRECEDENCE + 10)
@AutoConfigureAfter({ DispatcherServletAutoConfiguration.class,
TaskExecutionAutoConfiguration.class, ValidationAutoConfiguration.class })
public class WebMvcAutoConfiguration {
// View-name prefix/suffix applied by the default view resolver; both are
// intentionally empty so view names map directly to resource paths.
public static final String DEFAULT_PREFIX = "";

public static final String DEFAULT_SUFFIX = "";

// Root-relative location(s) used when resolving servlet resources
// (consumed further down this class; usage not fully visible in this excerpt).
private static final String[] SERVLET_LOCATIONS = { "/" };
/**
 * Registers the ordered {@link HiddenHttpMethodFilter} unless the application
 * already defines one, honouring the
 * {@code spring.mvc.hiddenmethod.filter.enabled} property (on by default).
 */
@Bean
@ConditionalOnMissingBean(HiddenHttpMethodFilter.class)
@ConditionalOnProperty(prefix = "spring.mvc.hiddenmethod.filter", name = "enabled",
		matchIfMissing = true)
public OrderedHiddenHttpMethodFilter hiddenHttpMethodFilter() {
	OrderedHiddenHttpMethodFilter filter = new OrderedHiddenHttpMethodFilter();
	return filter;
}
/**
 * Registers the ordered {@link FormContentFilter} unless the application
 * already defines one, honouring the
 * {@code spring.mvc.formcontent.filter.enabled} property (on by default).
 */
@Bean
@ConditionalOnMissingBean(FormContentFilter.class)
@ConditionalOnProperty(prefix = "spring.mvc.formcontent.filter", name = "enabled",
		matchIfMissing = true)
public OrderedFormContentFilter formContentFilter() {
	OrderedFormContentFilter filter = new OrderedFormContentFilter();
	return filter;
}
// Defined as a nested config to ensure WebMvcConfigurer is not read when not
// on the classpath
@Configuration(proxyBeanMethods = false)
@Import(EnableWebMvcConfiguration.class)
@EnableConfigurationProperties({ WebMvcProperties.class, ResourceProperties.class })
@Order(0)
public static class WebMvcAutoConfigurationAdapter
implements WebMvcConfigurer, ResourceLoaderAware {
private static final Log logger = LogFactory.getLog(WebMvcConfigurer.class);

// Bound configuration properties backing this adapter.
private final ResourceProperties resourceProperties;

private final WebMvcProperties mvcProperties;

private final ListableBeanFactory beanFactory;

// Lazily supplies the application's HttpMessageConverters, if any are defined.
private final ObjectProvider<HttpMessageConverters> messageConvertersProvider;

// Optional hook for customizing resource handler registrations; may be null
// (resolved via ObjectProvider.getIfAvailable() in the constructor).
final ResourceHandlerRegistrationCustomizer resourceHandlerRegistrationCustomizer;

// Injected through ResourceLoaderAware (see setResourceLoader).
private ResourceLoader resourceLoader;
/**
 * Creates the adapter, wiring the bound configuration properties and the
 * optional collaborators resolved from the context.
 * @param resourceProperties static-resource configuration
 * @param mvcProperties Spring MVC configuration
 * @param beanFactory the bean factory, used to look up optional beans later
 * @param messageConvertersProvider provider for application-defined converters
 * @param resourceHandlerRegistrationCustomizerProvider optional customizer
 */
public WebMvcAutoConfigurationAdapter(ResourceProperties resourceProperties,
		WebMvcProperties mvcProperties, ListableBeanFactory beanFactory,
		ObjectProvider<HttpMessageConverters> messageConvertersProvider,
		ObjectProvider<ResourceHandlerRegistrationCustomizer> resourceHandlerRegistrationCustomizerProvider) {
	this.resourceProperties = resourceProperties;
	this.mvcProperties = mvcProperties;
	this.beanFactory = beanFactory;
	this.messageConvertersProvider = messageConvertersProvider;
	// Resolve eagerly; null when no customizer bean is defined.
	this.resourceHandlerRegistrationCustomizer = resourceHandlerRegistrationCustomizerProvider
			.getIfAvailable();
}
// Called by the context because this class implements ResourceLoaderAware.
@Override
public void setResourceLoader(ResourceLoader resourceLoader) {
	this.resourceLoader = resourceLoader;
}
/**
 * Appends the application's own {@link HttpMessageConverters}, when such a
 * bean exists, to the converters used by Spring MVC.
 */
@Override
public void configureMessageConverters(List<HttpMessageConverter<?>> converters) {
	HttpMessageConverters customConverters = this.messageConvertersProvider.getIfAvailable();
	if (customConverters != null) {
		converters.addAll(customConverters.getConverters());
	}
}
/**
 * Configures async request processing: reuses the auto-configured application
 * task executor (when present and of the right type) and applies the
 * configured async request timeout, if any.
 */
@Override
public void configureAsyncSupport(AsyncSupportConfigurer configurer) {
	String executorBeanName = TaskExecutionAutoConfiguration.APPLICATION_TASK_EXECUTOR_BEAN_NAME;
	if (this.beanFactory.containsBean(executorBeanName)) {
		Object executor = this.beanFactory.getBean(executorBeanName);
		// Only usable if the bean actually is an AsyncTaskExecutor.
		if (executor instanceof AsyncTaskExecutor) {
			configurer.setTaskExecutor((AsyncTaskExecutor) executor);
		}
	}
	Duration requestTimeout = this.mvcProperties.getAsync().getRequestTimeout();
	if (requestTimeout != null) {
		configurer.setDefaultTimeout(requestTimeout.toMillis());
	}
}
/**
 * Applies the {@code spring.mvc.pathmatch.*} suffix-pattern settings to the
 * path match configurer.
 */
@Override
public void configurePathMatch(PathMatchConfigurer configurer) {
	WebMvcProperties.Pathmatch pathmatch = this.mvcProperties.getPathmatch();
	configurer.setUseSuffixPatternMatch(pathmatch.isUseSuffixPattern());
	configurer.setUseRegisteredSuffixPatternMatch(pathmatch.isUseRegisteredSuffixPattern());
}
/**
 * Applies the {@code spring.mvc.contentnegotiation.*} settings to the content
 * negotiation configurer: path-extension and parameter strategies, optional
 * parameter name, and any configured media-type mappings.
 */
@Override
public void configureContentNegotiation(ContentNegotiationConfigurer configurer) {
	WebMvcProperties.Contentnegotiation contentnegotiation = this.mvcProperties
			.getContentnegotiation();
	configurer.favorPathExtension(contentnegotiation.isFavorPathExtension());
	configurer.favorParameter(contentnegotiation.isFavorParameter());
	if (contentnegotiation.getParameterName() != null) {
		configurer.parameterName(contentnegotiation.getParameterName());
	}
	// Use the local already fetched above instead of re-reading the property
	// (the original re-fetched via this.mvcProperties.getContentnegotiation()).
	contentnegotiation.getMediaTypes().forEach(configurer::mediaType);
}
/**
 * Fallback {@link InternalResourceViewResolver} using the configured
 * {@code spring.mvc.view.prefix}/{@code spring.mvc.view.suffix}, registered
 * only when no other bean of this type exists.
 */
@Bean
@ConditionalOnMissingBean
public InternalResourceViewResolver defaultViewResolver() {
	InternalResourceViewResolver viewResolver = new InternalResourceViewResolver();
	WebMvcProperties.View view = this.mvcProperties.getView();
	viewResolver.setPrefix(view.getPrefix());
	viewResolver.setSuffix(view.getSuffix());
	return viewResolver;
}
/**
 * Resolves view names to {@link View} beans by bean name. Ordered near (but
 * not at) the lowest precedence so most other resolvers run first.
 */
@Bean
@ConditionalOnBean(View.class)
@ConditionalOnMissingBean
public BeanNameViewResolver beanNameViewResolver() {
	BeanNameViewResolver beanNameViewResolver = new BeanNameViewResolver();
	beanNameViewResolver.setOrder(Ordered.LOWEST_PRECEDENCE - 10);
	return beanNameViewResolver;
}
/**
 * Registers a {@link ContentNegotiatingViewResolver} wired with the context's
 * {@link ContentNegotiationManager}.
 */
@Bean
@ConditionalOnBean(ViewResolver.class)
@ConditionalOnMissingBean(name = "viewResolver",
		value = ContentNegotiatingViewResolver.class)
public ContentNegotiatingViewResolver viewResolver(BeanFactory beanFactory) {
	ContentNegotiationManager contentNegotiationManager = beanFactory
			.getBean(ContentNegotiationManager.class);
	ContentNegotiatingViewResolver negotiatingResolver = new ContentNegotiatingViewResolver();
	negotiatingResolver.setContentNegotiationManager(contentNegotiationManager);
	// ContentNegotiatingViewResolver uses all the other view resolvers to locate
	// a view so it should have a high precedence
	negotiatingResolver.setOrder(Ordered.HIGHEST_PRECEDENCE);
	return negotiatingResolver;
}
/**
 * Builds the {@link LocaleResolver}, registered only when
 * {@code spring.mvc.locale} is set: a fixed resolver when the strategy is
 * FIXED, otherwise an Accept-Language header resolver with the configured
 * locale as default.
 */
@Bean
@ConditionalOnMissingBean
@ConditionalOnProperty(prefix = "spring.mvc", name = "locale")
public LocaleResolver localeResolver() {
	WebMvcProperties.LocaleResolver resolverType = this.mvcProperties.getLocaleResolver();
	if (resolverType == WebMvcProperties.LocaleResolver.FIXED) {
		return new FixedLocaleResolver(this.mvcProperties.getLocale());
	}
	AcceptHeaderLocaleResolver acceptHeaderResolver = new AcceptHeaderLocaleResolver();
	acceptHeaderResolver.setDefaultLocale(this.mvcProperties.getLocale());
	return acceptHeaderResolver;
}
/**
 * Returns a {@link DefaultMessageCodesResolver} using the configured message
 * code format, or {@code null} when no format is configured (letting Spring
 * MVC use its default).
 */
@Override
public MessageCodesResolver getMessageCodesResolver() {
	DefaultMessageCodesResolver.Format format = this.mvcProperties
			.getMessageCodesResolverFormat();
	if (format == null) {
		return null;
	}
	DefaultMessageCodesResolver resolver = new DefaultMessageCodesResolver();
	resolver.setMessageCodeFormatter(format);
	return resolver;
}
/**
 * Registers every user-defined {@link Converter}, {@link GenericConverter}
 * and {@link Formatter} bean with the MVC formatter registry.
 */
@Override
public void addFormatters(FormatterRegistry registry) {
    getBeansOfType(Converter.class).forEach(registry::addConverter);
    getBeansOfType(GenericConverter.class).forEach(registry::addConverter);
    getBeansOfType(Formatter.class).forEach(registry::addFormatter);
}
/**
 * Returns all beans of the given type currently defined in the bean factory.
 */
private <T> Collection<T> getBeansOfType(Class<T> type) {
return this.beanFactory.getBeansOfType(type).values();
}
/**
 * Registers the default static-resource handlers: webjars under
 * {@code /webjars/**} and the configured static locations under the
 * {@code spring.mvc.static-path-pattern}. Skipped entirely when
 * {@code spring.resources.add-mappings=false}, and each mapping is only
 * added if the application has not already claimed the pattern.
 */
@Override
public void addResourceHandlers(ResourceHandlerRegistry registry) {
if (!this.resourceProperties.isAddMappings()) {
logger.debug("Default resource handling disabled");
return;
}
// Cache settings are shared by both the webjars and static mappings.
Duration cachePeriod = this.resourceProperties.getCache().getPeriod();
CacheControl cacheControl = this.resourceProperties.getCache()
.getCachecontrol().toHttpCacheControl();
if (!registry.hasMappingForPattern("/webjars/**")) {
customizeResourceHandlerRegistration(registry
.addResourceHandler("/webjars/**")
.addResourceLocations("classpath:/META-INF/resources/webjars/")
.setCachePeriod(getSeconds(cachePeriod))
.setCacheControl(cacheControl));
}
String staticPathPattern = this.mvcProperties.getStaticPathPattern();
if (!registry.hasMappingForPattern(staticPathPattern)) {
customizeResourceHandlerRegistration(
registry.addResourceHandler(staticPathPattern)
.addResourceLocations(getResourceLocations(
this.resourceProperties.getStaticLocations()))
.setCachePeriod(getSeconds(cachePeriod))
.setCacheControl(cacheControl));
}
}
/**
 * Converts a cache period to whole seconds; {@code null} in means no cache
 * period was configured and {@code null} is returned unchanged.
 */
private Integer getSeconds(Duration cachePeriod) {
    if (cachePeriod == null) {
        return null;
    }
    return (int) cachePeriod.getSeconds();
}
/**
 * Handler mapping that serves the application's welcome page (a static
 * {@code index.html} found in the resource locations, if any) for requests
 * matching the static path pattern.
 */
@Bean
public WelcomePageHandlerMapping welcomePageHandlerMapping(
ApplicationContext applicationContext) {
return new WelcomePageHandlerMapping(
new TemplateAvailabilityProviders(applicationContext),
applicationContext, getWelcomePage(),
this.mvcProperties.getStaticPathPattern());
}
/**
 * Concatenates the configured static locations with the servlet default
 * locations, keeping the configured locations first so they take priority.
 */
static String[] getResourceLocations(String[] staticLocations) {
    String[] locations = Arrays.copyOf(staticLocations,
            staticLocations.length + SERVLET_LOCATIONS.length);
    System.arraycopy(SERVLET_LOCATIONS, 0, locations, staticLocations.length,
            SERVLET_LOCATIONS.length);
    return locations;
}
/**
 * Searches the resource locations, in priority order, for the first readable
 * {@code index.html} to use as the welcome page.
 */
private Optional<Resource> getWelcomePage() {
String[] locations = getResourceLocations(
this.resourceProperties.getStaticLocations());
return Arrays.stream(locations).map(this::getIndexHtml)
.filter(this::isReadable).findFirst();
}
/**
 * Resolves the {@code index.html} candidate relative to the given location.
 * Locations are expected to end with a slash, so plain concatenation is used.
 */
private Resource getIndexHtml(String location) {
return this.resourceLoader.getResource(location + "index.html");
}
/**
 * Returns whether the resource exists and can produce a URL; any failure
 * while resolving the resource is treated as "not readable".
 */
private boolean isReadable(Resource resource) {
    try {
        if (!resource.exists()) {
            return false;
        }
        return resource.getURL() != null;
    }
    catch (Exception ex) {
        // Resolution errors simply disqualify the candidate.
        return false;
    }
}
/**
 * Applies the optional {@link ResourceHandlerRegistrationCustomizer} (used
 * for resource-chain configuration) to a new registration, when one exists.
 */
private void customizeResourceHandlerRegistration(
ResourceHandlerRegistration registration) {
if (this.resourceHandlerRegistrationCustomizer != null) {
this.resourceHandlerRegistrationCustomizer.customize(registration);
}
}
/**
 * Exposes the current request to the current thread (for request-scoped
 * beans etc.). Only registered when the application defines neither a
 * {@link RequestContextListener} nor its own {@link RequestContextFilter}.
 * Declared static so it is created early, before other configuration.
 */
@Bean
@ConditionalOnMissingBean({ RequestContextListener.class,
RequestContextFilter.class })
@ConditionalOnMissingFilterBean(RequestContextFilter.class)
public static RequestContextFilter requestContextFilter() {
return new OrderedRequestContextFilter();
}
/**
 * Serves the application's favicon from the configured static resource
 * locations, falling back to the classpath root. Can be switched off with
 * {@code spring.mvc.favicon.enabled=false}.
 */
@Configuration(proxyBeanMethods = false)
@ConditionalOnProperty(value = "spring.mvc.favicon.enabled",
matchIfMissing = true)
public static class FaviconConfiguration implements ResourceLoaderAware {
private final ResourceProperties resourceProperties;
private ResourceLoader resourceLoader;
public FaviconConfiguration(ResourceProperties resourceProperties) {
this.resourceProperties = resourceProperties;
}
@Override
public void setResourceLoader(ResourceLoader resourceLoader) {
this.resourceLoader = resourceLoader;
}
/**
 * Maps favicon requests (any path ending in {@code favicon.ico}) to the
 * favicon handler, just below the highest precedence.
 */
@Bean
public SimpleUrlHandlerMapping faviconHandlerMapping(
FaviconRequestHandler handler) {
SimpleUrlHandlerMapping mapping = new SimpleUrlHandlerMapping();
mapping.setOrder(Ordered.HIGHEST_PRECEDENCE + 1);
mapping.setUrlMap(Collections.singletonMap("**/favicon.ico", handler));
return mapping;
}
@Bean
public FaviconRequestHandler faviconRequestHandler() {
return new FaviconRequestHandler(resolveFaviconLocations());
}
/**
 * Builds the ordered list of locations to search for the favicon: the
 * resolved static locations first, then the classpath root as a fallback.
 */
private List<Resource> resolveFaviconLocations() {
String[] staticLocations = getResourceLocations(
this.resourceProperties.getStaticLocations());
List<Resource> locations = new ArrayList<>(staticLocations.length + 1);
Arrays.stream(staticLocations).map(this.resourceLoader::getResource)
.forEach(locations::add);
locations.add(new ClassPathResource("/"));
return Collections.unmodifiableList(locations);
}
}
/**
 * Resource handler pre-configured with the locations to search for the
 * favicon; otherwise behaves exactly like its superclass.
 */
static final class FaviconRequestHandler extends ResourceHttpRequestHandler {
FaviconRequestHandler(List<Resource> locations) {
setLocations(locations);
}
}
}
/**
* Configuration equivalent to {@code @EnableWebMvc}.
*/
@Configuration(proxyBeanMethods = false)
public static class EnableWebMvcConfiguration extends DelegatingWebMvcConfiguration {
// MVC properties; may be null when no WebMvcProperties bean is available.
private final WebMvcProperties mvcProperties;
private final ListableBeanFactory beanFactory;
// Optional user-supplied overrides for the core MVC infrastructure beans;
// null when absent or when more than one candidate exists.
private final WebMvcRegistrations mvcRegistrations;
public EnableWebMvcConfiguration(
ObjectProvider<WebMvcProperties> mvcPropertiesProvider,
ObjectProvider<WebMvcRegistrations> mvcRegistrationsProvider,
ListableBeanFactory beanFactory) {
this.mvcProperties = mvcPropertiesProvider.getIfAvailable();
this.mvcRegistrations = mvcRegistrationsProvider.getIfUnique();
this.beanFactory = beanFactory;
}
/**
 * Standard adapter bean, additionally applying the
 * {@code spring.mvc.ignore-default-model-on-redirect} property (defaulting
 * to ignore when no properties are available).
 */
@Bean
@Override
public RequestMappingHandlerAdapter requestMappingHandlerAdapter(
ContentNegotiationManager mvcContentNegotiationManager,
FormattingConversionService mvcConversionService,
Validator mvcValidator) {
RequestMappingHandlerAdapter adapter = super.requestMappingHandlerAdapter(
mvcContentNegotiationManager, mvcConversionService, mvcValidator);
adapter.setIgnoreDefaultModelOnRedirect(this.mvcProperties == null
|| this.mvcProperties.isIgnoreDefaultModelOnRedirect());
return adapter;
}
// Allows WebMvcRegistrations to supply a custom adapter instance.
@Override
protected RequestMappingHandlerAdapter createRequestMappingHandlerAdapter() {
if (this.mvcRegistrations != null
&& this.mvcRegistrations.getRequestMappingHandlerAdapter() != null) {
return this.mvcRegistrations.getRequestMappingHandlerAdapter();
}
return super.createRequestMappingHandlerAdapter();
}
@Bean
@Primary
@Override
public RequestMappingHandlerMapping requestMappingHandlerMapping(
ContentNegotiationManager mvcContentNegotiationManager,
FormattingConversionService mvcConversionService,
ResourceUrlProvider mvcResourceUrlProvider) {
// Must be @Primary for MvcUriComponentsBuilder to work
return super.requestMappingHandlerMapping(mvcContentNegotiationManager,
mvcConversionService, mvcResourceUrlProvider);
}
/**
 * Conversion service honouring the {@code spring.mvc.date-format} property,
 * populated with the user-registered formatters.
 */
@Bean
@Override
public FormattingConversionService mvcConversionService() {
WebConversionService conversionService = new WebConversionService(
this.mvcProperties.getDateFormat());
addFormatters(conversionService);
return conversionService;
}
/**
 * MVC validator; adapts the application's JSR-303 validator when the Bean
 * Validation API is on the classpath, otherwise falls back to the default.
 */
@Bean
@Override
public Validator mvcValidator() {
if (!ClassUtils.isPresent("javax.validation.Validator",
getClass().getClassLoader())) {
return super.mvcValidator();
}
return ValidatorAdapter.get(getApplicationContext(), getValidator());
}
// Allows WebMvcRegistrations to supply a custom handler mapping instance.
@Override
protected RequestMappingHandlerMapping createRequestMappingHandlerMapping() {
if (this.mvcRegistrations != null
&& this.mvcRegistrations.getRequestMappingHandlerMapping() != null) {
return this.mvcRegistrations.getRequestMappingHandlerMapping();
}
return super.createRequestMappingHandlerMapping();
}
// Prefers an application-defined ConfigurableWebBindingInitializer bean
// over the framework default.
@Override
protected ConfigurableWebBindingInitializer getConfigurableWebBindingInitializer(
FormattingConversionService mvcConversionService,
Validator mvcValidator) {
try {
return this.beanFactory.getBean(ConfigurableWebBindingInitializer.class);
}
catch (NoSuchBeanDefinitionException ex) {
return super.getConfigurableWebBindingInitializer(mvcConversionService,
mvcValidator);
}
}
// Allows WebMvcRegistrations to supply a custom exception resolver.
@Override
protected ExceptionHandlerExceptionResolver createExceptionHandlerExceptionResolver() {
if (this.mvcRegistrations != null && this.mvcRegistrations
.getExceptionHandlerExceptionResolver() != null) {
return this.mvcRegistrations.getExceptionHandlerExceptionResolver();
}
return super.createExceptionHandlerExceptionResolver();
}
/**
 * When {@code spring.mvc.log-resolved-exception} is enabled, configures
 * each resolver to log resolved exceptions at warn level under the
 * resolver's own class name.
 */
@Override
protected void extendHandlerExceptionResolvers(
List<HandlerExceptionResolver> exceptionResolvers) {
super.extendHandlerExceptionResolvers(exceptionResolvers);
if (this.mvcProperties.isLogResolvedException()) {
for (HandlerExceptionResolver resolver : exceptionResolvers) {
if (resolver instanceof AbstractHandlerExceptionResolver) {
((AbstractHandlerExceptionResolver) resolver)
.setWarnLogCategory(resolver.getClass().getName());
}
}
}
}
/**
 * Wraps any path-extension strategy so it can be skipped per request via
 * a request attribute (see OptionalPathExtensionContentNegotiationStrategy).
 */
@Bean
@Override
public ContentNegotiationManager mvcContentNegotiationManager() {
ContentNegotiationManager manager = super.mvcContentNegotiationManager();
List<ContentNegotiationStrategy> strategies = manager.getStrategies();
ListIterator<ContentNegotiationStrategy> iterator = strategies.listIterator();
while (iterator.hasNext()) {
ContentNegotiationStrategy strategy = iterator.next();
if (strategy instanceof PathExtensionContentNegotiationStrategy) {
iterator.set(new OptionalPathExtensionContentNegotiationStrategy(
strategy));
}
}
return manager;
}
}
/**
 * Registers the resource-chain customizer when the resource chain is enabled
 * (see the spring.resources.chain.* properties).
 */
@Configuration(proxyBeanMethods = false)
@ConditionalOnEnabledResourceChain
static class ResourceChainCustomizerConfiguration {
@Bean
public ResourceChainResourceHandlerRegistrationCustomizer resourceHandlerRegistrationCustomizer() {
return new ResourceChainResourceHandlerRegistrationCustomizer();
}
}
/**
 * Callback used to customize each default resource handler registration
 * (webjars and static locations) before it is finalized.
 */
interface ResourceHandlerRegistrationCustomizer {
void customize(ResourceHandlerRegistration registration);
}
/**
 * Configures the resource chain (caching, compression, versioning, appcache)
 * on each resource handler registration, driven by the
 * spring.resources.chain.* properties.
 */
static class ResourceChainResourceHandlerRegistrationCustomizer
implements ResourceHandlerRegistrationCustomizer {
// Field injection with a fresh default so the class also works when
// instantiated directly with "new" outside a Spring context.
@Autowired
private ResourceProperties resourceProperties = new ResourceProperties();
@Override
public void customize(ResourceHandlerRegistration registration) {
ResourceProperties.Chain properties = this.resourceProperties.getChain();
configureResourceChain(properties,
registration.resourceChain(properties.isCache()));
}
private void configureResourceChain(ResourceProperties.Chain properties,
ResourceChainRegistration chain) {
Strategy strategy = properties.getStrategy();
// Order matters: the encoded-resource resolver must run before the
// version resolver so pre-compressed variants are found first.
if (properties.isCompressed()) {
chain.addResolver(new EncodedResourceResolver());
}
if (strategy.getFixed().isEnabled() || strategy.getContent().isEnabled()) {
chain.addResolver(getVersionResourceResolver(strategy));
}
if (properties.isHtmlApplicationCache()) {
chain.addTransformer(new AppCacheManifestTransformer());
}
}
/**
 * Builds a version resolver combining the enabled fixed and/or
 * content-hash versioning strategies for their configured paths.
 */
private ResourceResolver getVersionResourceResolver(
ResourceProperties.Strategy properties) {
VersionResourceResolver resolver = new VersionResourceResolver();
if (properties.getFixed().isEnabled()) {
String version = properties.getFixed().getVersion();
String[] paths = properties.getFixed().getPaths();
resolver.addFixedVersionStrategy(version, paths);
}
if (properties.getContent().isEnabled()) {
String[] paths = properties.getContent().getPaths();
resolver.addContentVersionStrategy(paths);
}
return resolver;
}
}
/**
* Decorator to make {@link PathExtensionContentNegotiationStrategy} optional
* depending on a request attribute.
*/
/**
 * Decorator that lets a request opt out of path-extension content
 * negotiation: when the skip attribute is present and "true", every media
 * type is considered acceptable; otherwise the wrapped strategy decides.
 */
static class OptionalPathExtensionContentNegotiationStrategy
        implements ContentNegotiationStrategy {

    private static final String SKIP_ATTRIBUTE = PathExtensionContentNegotiationStrategy.class
            .getName() + ".SKIP";

    private final ContentNegotiationStrategy delegate;

    OptionalPathExtensionContentNegotiationStrategy(
            ContentNegotiationStrategy delegate) {
        this.delegate = delegate;
    }

    @Override
    public List<MediaType> resolveMediaTypes(NativeWebRequest webRequest)
            throws HttpMediaTypeNotAcceptableException {
        Object skip = webRequest.getAttribute(SKIP_ATTRIBUTE,
                RequestAttributes.SCOPE_REQUEST);
        if (skip == null || !Boolean.parseBoolean(skip.toString())) {
            return this.delegate.resolveMediaTypes(webRequest);
        }
        return MEDIA_TYPE_ALL_LIST;
    }

}
}
| apache-2.0 |
msoftware/AsyncMockWebServer | asyncmockwebserver-example/src/mock/java/com/mosn/asyncmockwebserver/example/ITunesSearchComponent.java | 526 | package com.mosn.asyncmockwebserver.example;
import com.mosn.asyncmockwebserver.example.infrastructure.InfrastructureModule;
import com.mosn.asyncmockwebserver.example.infrastructure.api.search.ContentsRepositoryImpl;
import javax.inject.Singleton;
import dagger.Component;
/**
 * Application-scoped Dagger component wiring the search and infrastructure
 * modules; provides injection entry points for the application class and the
 * contents repository implementation.
 */
@Singleton
@Component( modules = {ITunesSearchModule.class, InfrastructureModule.class} )
public interface ITunesSearchComponent {
void inject(ITunesSearchApplication application);
void inject(ContentsRepositoryImpl searchApiRepository);
}
| apache-2.0 |
apache/uima-uimaj | uimaj-json/src/test/java/org/apache/uima/json/JsonXmiCasSerializerTest.java | 1455 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.uima.json;
import org.junit.jupiter.api.BeforeEach;
/**
 * Re-runs the inherited JSON CAS serializer tests in XMI mode by creating a
 * fresh serializer and switching the {@code doJson} flag off after the parent
 * setup has run.
 */
public class JsonXmiCasSerializerTest extends JsonCasSerializerTest {
// @formatter:off
/*********************************************************************
 * This is a version of the Json tests that does XMI
 * serialization instead
 *
 * It extends the other JsonCasSerializerTest but sets up some flags
 *********************************************************************/
// @formatter:on
@Override
@BeforeEach
public void setUp() throws Exception {
super.setUp();
jcs = new JsonCasSerializer();
// Inherited tests check this flag to serialize as XMI instead of JSON.
doJson = false;
}
}
| apache-2.0 |
mdogan/hazelcast | hazelcast/src/test/java/com/hazelcast/partition/AbstractPartitionServiceNullTest.java | 2109 | /*
* Copyright (c) 2008-2020, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.partition;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.internal.util.ExceptionUtil;
import com.hazelcast.test.HazelcastTestSupport;
import org.junit.Assert;
import org.junit.Test;
import java.util.function.Consumer;
import static org.junit.Assert.fail;
/**
 * Verifies that {@code PartitionService} methods reject {@code null}
 * arguments with {@link NullPointerException}. Subclasses supply the
 * Hazelcast instance (member or client) through {@link #getDriver()}.
 */
public abstract class AbstractPartitionServiceNullTest extends HazelcastTestSupport {
@Test
public void testNullability() {
assertThrowsNPE(ps -> ps.getPartition(null));
}
private void assertThrowsNPE(ConsumerEx<PartitionService> method) {
assertThrows(NullPointerException.class, method);
}
/**
 * Runs the given action against the driver's PartitionService and asserts
 * that exactly the expected exception class is thrown.
 */
private void assertThrows(Class<? extends Exception> expectedExceptionClass,
ConsumerEx<PartitionService> method) {
try {
method.accept(getDriver().getPartitionService());
fail("Expected " + expectedExceptionClass
+ " but there was no exception!");
} catch (Exception e) {
Assert.assertSame(expectedExceptionClass, e.getClass());
}
}
/**
 * Consumer variant whose body may throw checked exceptions; they are
 * rethrown unchecked so lambdas stay concise.
 */
@FunctionalInterface
public interface ConsumerEx<T> extends Consumer<T> {
void acceptEx(T t) throws Exception;
@Override
default void accept(T t) {
try {
acceptEx(t);
} catch (Exception e) {
ExceptionUtil.sneakyThrow(e);
}
}
}
// Supplies the HazelcastInstance under test (member or client).
protected abstract HazelcastInstance getDriver();
}
| apache-2.0 |
ericwjr/floating-text-button | app/src/main/java/ru/dimorinny/floatingsample/MainActivity.java | 973 | package ru.dimorinny.floatingsample;
import android.os.Bundle;
import android.support.design.widget.CoordinatorLayout;
import android.support.design.widget.Snackbar;
import android.support.v7.app.AppCompatActivity;
import android.view.View;
import ru.dimorinny.floatingtextbutton.FloatingTextButton;
/**
 * Sample activity demonstrating FloatingTextButton: clicking the call button
 * shows a short Snackbar inside the coordinator layout.
 */
public class MainActivity extends AppCompatActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
// Final so the click listener below can reference it.
final CoordinatorLayout container = (CoordinatorLayout) findViewById(R.id.container);
FloatingTextButton callButton = (FloatingTextButton) findViewById(R.id.call_button);
callButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
Snackbar.make(container, "Call button clicked", Snackbar.LENGTH_SHORT).show();
}
});
}
}
| apache-2.0 |
kangkot/cassandra-lucene-index | plugin/src/main/java/com/stratio/cassandra/lucene/service/TokenMapperMurmur.java | 3458 | /*
* Licensed to STRATIO (C) under one or more contributor license agreements.
* See the NOTICE file distributed with this work for additional information
* regarding copyright ownership. The STRATIO (C) licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.stratio.cassandra.lucene.service;
import org.apache.cassandra.db.DecoratedKey;
import org.apache.cassandra.dht.Token;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.LongField;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.search.DocValuesRangeQuery;
import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.SortField;
import java.util.Collections;
import java.util.List;
/**
* {@link PartitionKeyMapper} to be used when {@link org.apache.cassandra.dht.Murmur3Partitioner} is used. It indexes
* the token long value as a Lucene long field.
*
* @author Andres de la Pena {@literal <adelapena@stratio.com>}
*/
public class TokenMapperMurmur extends TokenMapper {
/** The Lucene field name. */
private static final String FIELD_NAME = "_token_murmur";
/** The Lucene field type. */
private static final FieldType FIELD_TYPE = new FieldType();
// Indexed as a numeric long (for range queries) and also stored as a
// numeric doc value (for doc-values range queries and sorting); frozen to
// prevent further modification.
static {
FIELD_TYPE.setTokenized(true);
FIELD_TYPE.setOmitNorms(true);
FIELD_TYPE.setIndexOptions(IndexOptions.DOCS);
FIELD_TYPE.setNumericType(FieldType.NumericType.LONG);
FIELD_TYPE.setDocValuesType(DocValuesType.NUMERIC);
FIELD_TYPE.freeze();
}
/**
 * Builds a new {@link TokenMapperMurmur}.
 */
public TokenMapperMurmur() {
super();
}
/** {@inheritDoc} */
@Override
public void addFields(Document document, DecoratedKey partitionKey) {
Token token = partitionKey.getToken();
Long value = value(token);
document.add(new LongField(FIELD_NAME, value, FIELD_TYPE));
}
/** {@inheritDoc} */
@Override
public Query query(Token token) {
// Exact match expressed as an inclusive single-value range.
Long value = value(token);
return NumericRangeQuery.newLongRange(FIELD_NAME, value, value, true, true);
}
/** {@inheritDoc} */
@Override
protected Query doQuery(Token lower, Token upper, boolean includeLower, boolean includeUpper) {
// Null or minimum tokens translate to open-ended bounds.
Long start = lower == null || lower.isMinimum() ? null : value(lower);
Long stop = upper == null || upper.isMinimum() ? null : value(upper);
return DocValuesRangeQuery.newLongRange(FIELD_NAME, start, stop, includeLower, includeUpper);
}
/** {@inheritDoc} */
@Override
public List<SortField> sortFields() {
return Collections.singletonList(new SortField(FIELD_NAME, SortField.Type.LONG));
}
// Extracts the Murmur3 token's underlying long value.
private static Long value(Token token) {
return (Long) token.getTokenValue();
}
}
| apache-2.0 |
ryanemerson/activemq-artemis | tests/integration-tests/src/test/java/org/apache/activemq/artemis/tests/integration/management/ManagementServiceImplTest.java | 7453 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.tests.integration.management;
import org.apache.activemq.artemis.tests.unit.core.postoffice.impl.FakeQueue;
import org.apache.activemq.artemis.tests.util.ActiveMQTestBase;
import org.junit.Test;
import org.junit.Assert;
import org.apache.activemq.artemis.api.core.SimpleString;
import org.apache.activemq.artemis.api.core.management.AddressControl;
import org.apache.activemq.artemis.api.core.management.ManagementHelper;
import org.apache.activemq.artemis.api.core.management.QueueControl;
import org.apache.activemq.artemis.api.core.management.ResourceNames;
import org.apache.activemq.artemis.core.config.Configuration;
import org.apache.activemq.artemis.core.persistence.impl.nullpm.NullStorageManager;
import org.apache.activemq.artemis.core.server.ActiveMQServer;
import org.apache.activemq.artemis.core.server.ActiveMQServers;
import org.apache.activemq.artemis.core.server.Queue;
import org.apache.activemq.artemis.core.server.ServerMessage;
import org.apache.activemq.artemis.core.server.impl.ServerMessageImpl;
import org.apache.activemq.artemis.core.server.management.impl.ManagementServiceImpl;
import org.apache.activemq.artemis.tests.integration.server.FakeStorageManager;
import org.apache.activemq.artemis.tests.util.RandomUtil;
public class ManagementServiceImplTest extends ActiveMQTestBase
{
   /**
    * Invoking an existing operation ("createQueue") through a management
    * message must report success.
    */
   @Test
   public void testHandleManagementMessageWithOperation() throws Exception
   {
      String queue = RandomUtil.randomString();
      String address = RandomUtil.randomString();

      Configuration config = createBasicConfig()
         .setJMXManagementEnabled(false);

      ActiveMQServer server = addServer(ActiveMQServers.newActiveMQServer(config, false));
      server.start();

      // invoke attribute and operation on the server
      ServerMessage message = new ServerMessageImpl(1, 100);
      ManagementHelper.putOperationInvocation(message, ResourceNames.CORE_SERVER, "createQueue", queue, address);

      ServerMessage reply = server.getManagementService().handleMessage(message);

      Assert.assertTrue(ManagementHelper.hasOperationSucceeded(reply));
   }

   /**
    * Invoking a nonexistent operation must report failure and carry an error
    * result in the reply.
    */
   @Test
   public void testHandleManagementMessageWithOperationWhichFails() throws Exception
   {
      Configuration config = createBasicConfig()
         .setJMXManagementEnabled(false);

      ActiveMQServer server = addServer(ActiveMQServers.newActiveMQServer(config, false));
      server.start();

      // invoke attribute and operation on the server
      ServerMessage message = new ServerMessageImpl(1, 100);
      ManagementHelper.putOperationInvocation(message, ResourceNames.CORE_SERVER, "thereIsNoSuchOperation");

      ServerMessage reply = server.getManagementService().handleMessage(message);

      Assert.assertFalse(ManagementHelper.hasOperationSucceeded(reply));
      Assert.assertNotNull(ManagementHelper.getResult(reply));
   }

   /**
    * Invoking an operation on a nonexistent resource must report failure.
    * (Was misnamed "testHandleManagementMessageWithUnknowResource".)
    */
   @Test
   public void testHandleManagementMessageWithUnknownResource() throws Exception
   {
      Configuration config = createBasicConfig()
         .setJMXManagementEnabled(false);

      ActiveMQServer server = addServer(ActiveMQServers.newActiveMQServer(config, false));
      server.start();

      // invoke attribute and operation on the server
      ServerMessage message = new ServerMessageImpl(1, 100);
      ManagementHelper.putOperationInvocation(message, "Resouce.Does.Not.Exist", "toString");

      ServerMessage reply = server.getManagementService().handleMessage(message);

      Assert.assertFalse(ManagementHelper.hasOperationSucceeded(reply));
      Assert.assertNotNull(ManagementHelper.getResult(reply));
   }

   /**
    * Reading an existing attribute ("started") must succeed and return its
    * value. (This and the next test had their names swapped: this body was
    * previously called ...WithUnknownAttribute.)
    */
   @Test
   public void testHandleManagementMessageWithKnownAttribute() throws Exception
   {
      Configuration config = createBasicConfig()
         .setJMXManagementEnabled(false);

      ActiveMQServer server = addServer(ActiveMQServers.newActiveMQServer(config, false));
      server.start();

      // invoke attribute and operation on the server
      ServerMessage message = new ServerMessageImpl(1, 100);
      ManagementHelper.putAttribute(message, ResourceNames.CORE_SERVER, "started");

      ServerMessage reply = server.getManagementService().handleMessage(message);

      Assert.assertTrue(ManagementHelper.hasOperationSucceeded(reply));
      Assert.assertTrue((Boolean) ManagementHelper.getResult(reply));
   }

   /**
    * Reading a nonexistent attribute must report failure and carry an error
    * result. (This body was previously called ...WithKnownAttribute.)
    */
   @Test
   public void testHandleManagementMessageWithUnknownAttribute() throws Exception
   {
      Configuration config = createBasicConfig()
         .setJMXManagementEnabled(false);

      ActiveMQServer server = addServer(ActiveMQServers.newActiveMQServer(config, false));
      server.start();

      // invoke attribute and operation on the server
      ServerMessage message = new ServerMessageImpl(1, 100);
      ManagementHelper.putAttribute(message, ResourceNames.CORE_SERVER, "attribute.Does.Not.Exist");

      ServerMessage reply = server.getManagementService().handleMessage(message);

      Assert.assertFalse(ManagementHelper.hasOperationSucceeded(reply));
      Assert.assertNotNull(ManagementHelper.getResult(reply));
   }

   /**
    * Registering an address and a queue with the management service must make
    * exactly one AddressControl and one QueueControl discoverable, with
    * matching names.
    */
   @Test
   public void testGetResources() throws Exception
   {
      Configuration config = createBasicConfig()
         .setJMXManagementEnabled(false);

      ManagementServiceImpl managementService = new ManagementServiceImpl(null, config);
      managementService.setStorageManager(new NullStorageManager());

      SimpleString address = RandomUtil.randomSimpleString();
      managementService.registerAddress(address);
      Queue queue = new FakeQueue(RandomUtil.randomSimpleString());
      managementService.registerQueue(queue, RandomUtil.randomSimpleString(), new FakeStorageManager());

      Object[] addresses = managementService.getResources(AddressControl.class);
      Assert.assertEquals(1, addresses.length);
      Assert.assertTrue(addresses[0] instanceof AddressControl);
      AddressControl addressControl = (AddressControl)addresses[0];
      Assert.assertEquals(address.toString(), addressControl.getAddress());

      Object[] queues = managementService.getResources(QueueControl.class);
      Assert.assertEquals(1, queues.length);
      Assert.assertTrue(queues[0] instanceof QueueControl);
      QueueControl queueControl = (QueueControl)queues[0];
      Assert.assertEquals(queue.getName().toString(), queueControl.getName());
   }
}
| apache-2.0 |
tberthel/gaevfs | src/com/newatlanta/repackaged/java/nio/channels/AsynchronousChannel.java | 5531 | /*
* Copyright 2007-2009 Sun Microsystems, Inc. All Rights Reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Sun designates this
* particular file as subject to the "Classpath" exception as provided
* by Sun in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
* CA 95054 USA or visit www.sun.com if you need additional information or
* have any questions.
*/
package com.newatlanta.repackaged.java.nio.channels;
import java.io.IOException;
import java.nio.channels.AsynchronousCloseException;
import java.nio.channels.Channel;
import java.nio.channels.ClosedChannelException;
import java.util.concurrent.Future; // javadoc
/**
* A channel that supports asynchronous I/O operations. Asynchronous I/O
* operations will usually take one of two forms:
*
* <ol>
* <li><pre>{@link Future}<V> <em>operation</em>(<em>...</em>)</pre></li>
* <li><pre>Future<V> <em>operation</em>(<em>...</em> A attachment, {@link CompletionHandler}<V,? super A> handler)</pre></li>
* </ol>
*
* where <i>operation</i> is the name of the I/O operation (read or write for
* example), <i>V</i> is the result type of the I/O operation, and <i>A</i> is
* the type of an object attached to the I/O operation to provide context when
* consuming the result. The attachment is important for cases where a
* <em>state-less</em> {@code CompletionHandler} is used to consume the result
* of many I/O operations.
*
* <p> In the first form, the methods defined by the {@link Future Future}
* interface may be used to check if the operation has completed, wait for its
* completion, and to retrieve the result. In the second form, a {@link
* CompletionHandler} is invoked to consume the result of the I/O operation when
* it completes, fails, or is cancelled.
*
* <p> A channel that implements this interface is <em>asynchronously
* closeable</em>: If an I/O operation is outstanding on the channel and the
* channel's {@link #close close} method is invoked, then the I/O operation
* fails with the exception {@link AsynchronousCloseException}.
*
* <p> Asynchronous channels are safe for use by multiple concurrent threads.
* Some channel implementations may support concurrent reading and writing, but
* may not allow more than one read and one write operation to be outstanding at
* any given time.
*
* <h4>Cancellation</h4>
*
* <p> The {@code Future} interface defines the {@link Future#cancel cancel}
* method to cancel execution of a task.
*
* <p> Where the {@code cancel} method is invoked with the {@code
* mayInterruptIfRunning} parameter set to {@code true} then the I/O operation
* may be interrupted by closing the channel. This will cause any other I/O
* operations outstanding on the channel to complete with the exception {@link
* AsynchronousCloseException}.
*
* <p> If a {@code CompletionHandler} is specified when initiating an I/O
* operation, and the {@code cancel} method is invoked to cancel the I/O
* operation before it completes, then the {@code CompletionHandler}'s {@link
* CompletionHandler#cancelled cancelled} method is invoked.
*
* <p> If an implementation of this interface supports a means to cancel I/O
* operations, and where cancellation may leave the channel, or the entity to
* which it is connected, in an inconsistent state, then the channel is put into
* an implementation specific <em>error state</em> that prevents further
* attempts to initiate I/O operations on the channel. For example, if a read
* operation is cancelled but the implementation cannot guarantee that bytes
* have not been read from the channel then it puts the channel into error state
* state; further attempts to initiate a {@code read} operation causes an
* unspecified runtime exception to be thrown.
*
* <p> Where the {@code cancel} method is invoked to cancel read or write
* operations then it recommended that all buffers used in the I/O operations be
* discarded or care taken to ensure that the buffers are not accessed while the
* channel remains open.
*
* @since 1.7
*/
public interface AsynchronousChannel
    extends Channel
{
    /**
     * Closes this channel.
     *
     * <p> Any outstanding asynchronous operations upon this channel will
     * complete with the exception {@link AsynchronousCloseException}. After a
     * channel is closed then further attempts to initiate asynchronous I/O
     * operations complete immediately with cause {@link ClosedChannelException}.
     *
     * <p> This method otherwise behaves exactly as specified by the {@link
     * Channel} interface; in particular, invoking it on a channel that is
     * already closed has no effect.
     *
     * @throws IOException
     *          If an I/O error occurs
     */
    void close() throws IOException;
}
| apache-2.0 |
googleapis/google-api-java-client-services | clients/google-api-services-dlp/v2/1.31.0/com/google/api/services/dlp/v2/model/GooglePrivacyDlpV2FinishDlpJobRequest.java | 1693 | /*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.dlp.v2.model;
/**
* The request message for finishing a DLP hybrid job.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Cloud Data Loss Prevention (DLP) API. For a detailed
* explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class GooglePrivacyDlpV2FinishDlpJobRequest extends com.google.api.client.json.GenericJson {

  /**
   * Stores an arbitrary {@code fieldName -> value} pair in the underlying JSON data map.
   * Overridden only to narrow the return type so calls can be chained on this concrete class.
   */
  @Override
  public GooglePrivacyDlpV2FinishDlpJobRequest set(String fieldName, Object value) {
    return (GooglePrivacyDlpV2FinishDlpJobRequest) super.set(fieldName, value);
  }

  /**
   * Returns a copy of this request (delegates to {@code GenericJson.clone()});
   * the covariant return type mirrors {@link #set}.
   */
  @Override
  public GooglePrivacyDlpV2FinishDlpJobRequest clone() {
    return (GooglePrivacyDlpV2FinishDlpJobRequest) super.clone();
  }
}
| apache-2.0 |
Invisibi/android-range-seek-bar | rangeseekbar-sample/src/main/java/com/yahoo/mobile/client/android/demo/DemoActivity.java | 1693 | /*
Copyright 2014 Yahoo Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.yahoo.mobile.client.android.demo;
import android.app.Activity;
import android.os.Bundle;
import android.widget.LinearLayout;
import com.yahoo.mobile.client.android.util.rangeseekbar.RangeSeekBar;
public class DemoActivity extends Activity {

    /** Called when the activity is first created. */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.main);

        // Build a range seek bar in code and configure its bounds and initial thumbs.
        RangeSeekBar<Integer> programmaticBar = new RangeSeekBar<Integer>(this);
        programmaticBar.setRangeValues(15, 90);
        programmaticBar.setSelectedMinValue(20);
        programmaticBar.setSelectedMaxValue(88);

        // Attach the programmatic bar to the placeholder container declared in the layout.
        LinearLayout placeholder = (LinearLayout) findViewById(R.id.seekbar_placeholder);
        placeholder.addView(programmaticBar);

        // The XML-declared bar only needs its thumb-label colour set from code.
        RangeSeekBar xmlBar = (RangeSeekBar) findViewById(R.id.rangeSeekBarTextColorWithCode);
        xmlBar.setTextAboveThumbsColorResource(android.R.color.holo_blue_bright);
    }
}
| apache-2.0 |
fastcat-co/fastcatsearch | core/src/main/java/org/fastcatsearch/ir/index/ShardFilter.java | 2273 | //package org.fastcatsearch.ir.index;
//
//import java.util.HashSet;
//import java.util.Map;
//import java.util.Set;
//
//import org.fastcatsearch.ir.document.Document;
//import org.fastcatsearch.ir.field.Field;
//import org.slf4j.Logger;
//import org.slf4j.LoggerFactory;
//
///**
// * shard 설정의 filter에 맞는 문서인지를 판가름한다.
// * 향후 복잡한 filer식도 지원하도록 한다. >= 와 boolean연산. 그리고 다양한 필드셋팅에 대해서..
// * */
//public class ShardFilter {
// private static Logger logger = LoggerFactory.getLogger(ShardFilter.class);
//
// public static String ROOT_FILTER = "ALL";
// private Integer index;
// protected Set<String> dataSet;
// private boolean hasAll;
//
// public ShardFilter(Map<String, Integer> fieldSequenceMap, String filter) {
// filter = filter.trim();
//
// if(filter.equals(ROOT_FILTER)){
// hasAll = true;
// return;
// }
//
// dataSet = new HashSet<String>();
//
// try{
// int pos = filter.indexOf('=');
//
// String fieldId = filter.substring(0, pos).trim().toUpperCase();
// String value = filter.substring(pos + 1).trim();
// String[] values = null;
// if(value.startsWith("(")){
// values = value.substring(1, value.length() - 1).split(",");
// }
//
//
// if(values != null){
// //multi value
// for (int i = 0; i < values.length; i++) {
// dataSet.add(stripString(values[i].trim()));
// }
// }else{
// //single value
// dataSet.add(stripString(value));
// }
//
// index = fieldSequenceMap.get(fieldId);
//
// logger.debug("shard filter id={} index={} dataSet={}", fieldId, index, dataSet);
// }catch(Exception e){
// logger.error("Filter condition has error.filter >> \"{}\"", filter);
// }
// }
//
// private String stripString(String str){
// if(str.startsWith("'") && str.endsWith("'")){
// return str.substring(1, str.length() - 1);
// }
// return str;
// }
//
// public boolean accept(Document document) {
// if(hasAll){
// return true;
// }
// if(index == null){
// return false;
// }
//
// Field field = document.get(index);
// if(field == null){
// return false;
// }
//
// String fieldValue = field.getDataString();
// return dataSet.contains(fieldValue);
// }
//}
| apache-2.0 |
levanhien8/SokkerViewer | src/main/java/pl/pronux/sokker/comparators/SpyPlayersComparator.java | 7187 | package pl.pronux.sokker.comparators;
import java.text.Collator;
import java.util.Collections;
import java.util.Locale;
import pl.pronux.sokker.interfaces.Sort;
import pl.pronux.sokker.interfaces.SVComparator;
import pl.pronux.sokker.model.Player;
import pl.pronux.sokker.model.PlayerStats;
public class SpyPlayersComparator implements SVComparator<Player>, Sort {
public static final int COUNTRY = 0;
public static final int NAME = 1;
public static final int SURNAME = 2;
public static final int HEIGHT = 3;
public static final int WEIGHT = 4;
public static final int BMI = 5;
public static final int VALUE = 6;
public static final int SALARY = 7;
public static final int AGE = 8;
public static final int FORM = 9;
public static final int DISCIPLINE = 10;
public static final int EXPERIENCE = 11;
public static final int TEAMWORK = 12;
public static final int MATCHES = 13;
public static final int GOALS = 14;
public static final int ASSISTS = 15;
public static final int RANKING_AVG = 16;
public static final int RANKING_MAX = 17;
public static final int RANKING_MIN = 18;
public static final int PREFERRED_POSITION = 19;
public static final int CARDS = 20;
public static final int INJURY = 21;
public static final int NOTE = 22;
private int column;
private int direction;
/**
* Compares two Player objects
*
* @param obj1
* the first Player
* @param obj2
* the second Player
* @return int
* @see java.util.Comparator#compare(java.lang.Object, java.lang.Object)
*/
public int compare(Player p1, Player p2) {
int rc = 0;
Locale loc = Locale.getDefault();
Collator coll = Collator.getInstance(loc);
// Determine which field to sort on, then sort
// on that field
switch (column) {
case NAME:
rc = coll.compare(p1.getName(), p2.getName());
// rc = p1.getName().compareTo(p2.getName());
break;
case SURNAME:
rc = coll.compare(p1.getSurname(), p2.getSurname());
// rc = p1.getSurname().compareTo(p2.getSurname());
break;
case HEIGHT:
rc = p1.getHeight() < p2.getHeight() ? -1 : 1;
break;
case WEIGHT:
rc = (p1.getSkills()[p1.getSkills().length - 1].getWeight() < p2.getSkills()[p2.getSkills().length - 1].getWeight()) ? -1 : 1;
break;
case BMI:
rc = (p1.getSkills()[p1.getSkills().length - 1].getBmi() < p2.getSkills()[p2.getSkills().length - 1].getBmi()) ? -1 : 1;
break;
case COUNTRY:
rc = (p1.getCountryfrom() < p2.getCountryfrom()) ? -1 : 1;
break;
case VALUE:
rc = p1.getSkills()[p1.getSkills().length - 1].getValue().compareTo(p2.getSkills()[p2.getSkills().length - 1].getValue());
break;
case SALARY:
rc = p1.getSkills()[p1.getSkills().length - 1].getSalary().compareTo(p2.getSkills()[p2.getSkills().length - 1].getSalary());
break;
case AGE:
rc = (p1.getSkills()[p1.getSkills().length - 1].getAge() < p2.getSkills()[p2.getSkills().length - 1].getAge()) ? -1 : 1;
break;
case FORM:
rc = (p1.getSkills()[p1.getSkills().length - 1].getForm() < p2.getSkills()[p2.getSkills().length - 1].getForm()) ? -1 : 1;
break;
case DISCIPLINE:
rc = (p1.getSkills()[p1.getSkills().length - 1].getDiscipline() < p2.getSkills()[p2.getSkills().length - 1].getDiscipline()) ? -1 : 1;
break;
case EXPERIENCE:
rc = (p1.getSkills()[p1.getSkills().length - 1].getExperience() < p2.getSkills()[p2.getSkills().length - 1].getExperience()) ? -1 : 1;
break;
case TEAMWORK:
rc = (p1.getSkills()[p1.getSkills().length - 1].getTeamwork() < p2.getSkills()[p2.getSkills().length - 1].getTeamwork()) ? -1 : 1;
break;
case MATCHES:
rc = (p1.getSkills()[p1.getSkills().length - 1].getMatches() < p2.getSkills()[p2.getSkills().length - 1].getMatches()) ? -1 : 1;
break;
case GOALS:
rc = (p1.getSkills()[p1.getSkills().length - 1].getGoals() < p2.getSkills()[p2.getSkills().length - 1].getGoals()) ? -1 : 1;
break;
case ASSISTS:
rc = (p1.getSkills()[p1.getSkills().length - 1].getAssists() < p2.getSkills()[p2.getSkills().length - 1].getAssists()) ? -1 : 1;
break;
case NOTE:
if (p1.getNote() == null && p2.getNote() == null) {
rc = 0;
} else if (p1.getNote() != null && p2.getNote() == null) {
rc = 1;
} else if (p1.getNote() == null && p2.getNote() != null) {
rc = -1;
} else {
rc = (p1.getNote().compareTo(p2.getNote()));
}
break;
case CARDS:
rc = (p1.getSkills()[p1.getSkills().length - 1].getCards() < p2.getSkills()[p2.getSkills().length - 1].getCards()) ? -1 : 1;
break;
case INJURY:
rc = (p1.getSkills()[p1.getSkills().length - 1].getInjurydays() < p2.getSkills()[p2.getSkills().length - 1].getInjurydays()) ? -1 : 1;
break;
case RANKING_AVG:
rc = (p1.getAvgRating() < p2.getAvgRating()) ? -1 : 1;
break;
case PREFERRED_POSITION:
rc = (p1.getPreferredPosition() < p2.getPreferredPosition()) ? -1 : 1;
break;
case RANKING_MAX:
if (p1.getPlayerMatchStatistics().size() == 0 && p2.getPlayerMatchStatistics().size() == 0) {
rc = 0;
} else if (p1.getPlayerMatchStatistics().size() > 0 && p2.getPlayerMatchStatistics().size() == 0) {
rc = 1;
} else if (p1.getPlayerMatchStatistics().size() == 0 && p2.getPlayerMatchStatistics().size() > 0) {
rc = -1;
} else {
PlayerStats p1max = Collections.max(p1.getPlayerMatchStatistics(), new PlayerStatsComparator(PlayerStatsComparator.RATING, PlayerStatsComparator.ASCENDING));
PlayerStats p2max = Collections.max(p2.getPlayerMatchStatistics(), new PlayerStatsComparator(PlayerStatsComparator.RATING, PlayerStatsComparator.ASCENDING));
rc = (p1max.getRating() < p2max.getRating()) ? -1 : 1;
}
break;
case RANKING_MIN:
if (p1.getPlayerMatchStatistics().size() == 0 && p2.getPlayerMatchStatistics().size() == 0) {
rc = 0;
} else if (p1.getPlayerMatchStatistics().size() > 0 && p2.getPlayerMatchStatistics().size() == 0) {
rc = 1;
} else if (p1.getPlayerMatchStatistics().size() == 0 && p2.getPlayerMatchStatistics().size() > 0) {
rc = -1;
} else {
PlayerStats p1min = Collections.min(p1.getPlayerMatchStatistics(), new PlayerStatsComparator(PlayerStatsComparator.RATING, PlayerStatsComparator.ASCENDING));
PlayerStats p2min = Collections.min(p2.getPlayerMatchStatistics(), new PlayerStatsComparator(PlayerStatsComparator.RATING, PlayerStatsComparator.ASCENDING));
rc = (p1min.getRating() < p2min.getRating()) ? -1 : 1;
}
break;
default:
// TODO: Implement 'default' statement
break;
}
// Check the direction for sort and flip the sign
// if appropriate
if (direction == DESCENDING) {
rc = -rc;
}
return rc;
}
/**
* Sets the column for sorting
*
* @param column
* the column
*/
public void setColumn(int column) {
this.column = column;
}
public int getColumn() {
return column;
}
/**
* Sets the direction for sorting
*
* @param direction
* the direction
*/
public void setDirection(int direction) {
this.direction = direction;
}
public int getDirection() {
return direction;
}
/**
* Reverses the direction
*/
public void reverseDirection() {
direction = 1 - direction;
}
} | apache-2.0 |
amitmawkin/Hygieia | collectors/test-results/jira-xray/src/main/java/com/capitalone/dashboard/core/json/gen/TestExecJsonGenerator.java | 534 | package com.capitalone.dashboard.core.json.gen;
import com.atlassian.jira.rest.client.internal.json.gen.JsonGenerator;
import com.capitalone.dashboard.api.domain.TestExecution;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
/**
* This class will generate a JSON Object for a Test Execution
*/
public class TestExecJsonGenerator implements JsonGenerator<TestExecution> {

    /**
     * Serializes the given test execution into a JSON object.
     *
     * <p>NOTE(review): this is currently an unimplemented stub — it always
     * returns {@code null} regardless of the input, so callers must be
     * prepared to handle a {@code null} result.
     */
    public JSONObject generate(TestExecution testExecution) throws JSONException {
        return null;
    }
}
| apache-2.0 |
jwren/intellij-community | platform/platform-api/src/com/intellij/ui/SearchTextField.java | 13355 | // Copyright 2000-2021 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ui;
import com.intellij.ide.util.PropertiesComponent;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.keymap.KeymapUtil;
import com.intellij.openapi.project.DumbAwareAction;
import com.intellij.openapi.ui.popup.JBPopup;
import com.intellij.openapi.ui.popup.JBPopupFactory;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.wm.IdeFocusManager;
import com.intellij.ui.components.JBList;
import com.intellij.ui.components.JBTextField;
import com.intellij.ui.scale.JBUIScale;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.event.DocumentListener;
import javax.swing.plaf.TextUI;
import java.awt.*;
import java.awt.event.*;
import java.util.ArrayList;
import java.util.List;
/**
 * A text field for entering search queries with an optional, persistable
 * search history that can be shown in a popup list.
 *
 * <p>History is kept in {@link MyModel} (capped at {@link #setHistorySize(int)}
 * entries, most recent first) and, when a history property name is supplied,
 * persisted through {@link PropertiesComponent} as newline-joined text.
 */
public class SearchTextField extends JPanel {
  /** DataContext key under which the focused search field is exposed to actions. */
  public static final DataKey<SearchTextField> KEY = DataKey.create("search.text.field");

  /** Alt+Down: opens the history popup / steps to the next history item. */
  public static final KeyStroke SHOW_HISTORY_KEYSTROKE = KeyStroke.getKeyStroke(KeyEvent.VK_DOWN, InputEvent.ALT_DOWN_MASK);
  public static final CustomShortcutSet SHOW_HISTORY_SHORTCUT = new CustomShortcutSet(SHOW_HISTORY_KEYSTROKE);
  /** Alt+Up: steps to the previous history item. */
  public static final KeyStroke ALT_SHOW_HISTORY_KEYSTROKE = KeyStroke.getKeyStroke(KeyEvent.VK_UP, InputEvent.ALT_DOWN_MASK);
  public static final CustomShortcutSet ALT_SHOW_HISTORY_SHORTCUT = new CustomShortcutSet(ALT_SHOW_HISTORY_KEYSTROKE);

  // Maximum number of history entries exposed by the model.
  private int myHistorySize = 5;
  // Index used when cycling through history with Alt+Up / Alt+Down.
  private int myCurrentHistoryIndex;
  private final MyModel myModel;
  private final TextFieldWithProcessing myTextField;
  // The currently visible history popup, or null when hidden.
  @Nullable
  private JBPopup myPopup;
  // PropertiesComponent key used to persist history between sessions; null disables persistence.
  private String myHistoryPropertyName;
  private final boolean historyPopupEnabled;

  public SearchTextField() {
    this(true);
  }

  public SearchTextField(boolean historyPopupEnabled) {
    this(historyPopupEnabled, null);
  }

  public SearchTextField(@NonNls String historyPropertyName) {
    this(true, historyPropertyName);
  }

  /**
   * @param historyPopupEnabled whether clicking the search icon / pressing the
   *                            history shortcut shows the history popup
   * @param historyPropertyName key under which history is persisted, or null
   *                            to keep history in memory only
   */
  public SearchTextField(boolean historyPopupEnabled, @Nullable String historyPropertyName) {
    super(new BorderLayout());
    this.historyPopupEnabled = historyPopupEnabled;

    myModel = new MyModel();

    myTextField = new TextFieldWithProcessing() {
      {
        this.putClientProperty("History.Popup.Enabled", historyPopupEnabled);
      }

      @Override
      public void processKeyEvent(final KeyEvent e) {
        // Let this SearchTextField consume the event first (e.g. history shortcut).
        if (preprocessEventForTextField(e)) return;
        super.processKeyEvent(e);
      }

      @Override
      protected void processMouseEvent(MouseEvent e) {
        TextUI ui = getUI();
        //noinspection unchecked
        if (ui instanceof Condition && ((Condition)ui).value(e)) return;
        // A click in the leading icon area (first ~28px) opens the history popup
        // when there is any history; the cursor hints at the clickable region.
        if(e.getX() < JBUIScale.scale(28) && myModel.myFullList.size() > 0) {
          myTextField.setCursor(Cursor.getPredefinedCursor(Cursor.HAND_CURSOR));
          if (e.getClickCount() == 1) {
            showPopup();
          }
        } else {
          myTextField.setCursor(Cursor.getPredefinedCursor(Cursor.TEXT_CURSOR));
        }
        super.processMouseEvent(e);
      }

      @Override
      protected Rectangle getEmptyTextComponentBounds(Rectangle bounds) {
        // Optional client property shrinks the area used for the placeholder text.
        Integer gap = (Integer)getClientProperty("JTextField.Search.GapEmptyText");
        if (gap != null) {
          bounds.x += gap;
          bounds.width -= 2 * gap;
        }
        return bounds;
      }
    };
    myTextField.setColumns(15);
    myTextField.addFocusListener(new FocusAdapter() {
      @Override
      public void focusLost(FocusEvent e) {
        onFocusLost();
        super.focusLost(e);
      }

      @Override
      public void focusGained(FocusEvent e) {
        onFocusGained();
        super.focusGained(e);
      }
    });
    add(myTextField, BorderLayout.CENTER);

    setHistoryPropertyName(historyPropertyName);

    if (historyPropertyName != null) {
      // Alt+Up: cycle backwards through history (wraps around).
      myTextField.getActionMap().put("showPrevHistoryItem", new AbstractAction() {
        @Override
        public void actionPerformed(ActionEvent e) {
          // Remember the current text before navigating away from it.
          if (!myModel.myFullList.contains(getText())) addCurrentTextToHistory();
          if (myModel.getSize() < 2) return;
          myCurrentHistoryIndex--;
          if (myCurrentHistoryIndex < 0) myCurrentHistoryIndex = myModel.getSize() - 1;
          setText(myModel.getElementAt(myCurrentHistoryIndex));
        }
      });
      myTextField.getInputMap().put(ALT_SHOW_HISTORY_KEYSTROKE, "showPrevHistoryItem");
      // Alt+Down: cycle forwards through history (wraps around).
      myTextField.getActionMap().put("showNextHistoryItem", new AbstractAction() {
        @Override
        public void actionPerformed(ActionEvent e) {
          if (!myModel.myFullList.contains(getText())) addCurrentTextToHistory();
          if (myModel.getSize() < 2) return;
          myCurrentHistoryIndex++;
          if (myCurrentHistoryIndex > myModel.getSize() - 1) myCurrentHistoryIndex = 0;
          setText(myModel.getElementAt(myCurrentHistoryIndex));
        }
      });
      myTextField.getInputMap().put(SHOW_HISTORY_KEYSTROKE, "showNextHistoryItem");
    }

    // Switch the field into the LaF's "search" visual variant and hook the
    // built-in clear ("x") button.
    myTextField.putClientProperty("JTextField.variant", "search");
    myTextField.putClientProperty("JTextField.Search.Gap", JBUIScale.scale(6));
    myTextField.putClientProperty("JTextField.Search.CancelAction", (ActionListener)e -> {
      myTextField.setText("");
      onFieldCleared();
    });

    // Keymap-configurable shortcut that also opens the history popup.
    DumbAwareAction.create(event -> {
      showPopup();
    }).registerCustomShortcutSet(KeymapUtil.getActiveKeymapShortcuts("ShowSearchHistory"), myTextField);
  }

  @Override
  public void addNotify() {
    super.addNotify();
    // Register Escape-to-clear once the component is attached to a hierarchy.
    if (toClearTextOnEscape()) {
      ActionManager actionManager = ActionManager.getInstance();
      if (actionManager != null) {
        EmptyAction.registerWithShortcutSet(IdeActions.ACTION_CLEAR_TEXT, CommonShortcuts.ESCAPE, this);
      }
    }
  }

  /** Whether Escape should clear the text; disabled outside a running application (e.g. tests). */
  protected boolean toClearTextOnEscape() {
    return ApplicationManager.getApplication() != null;
  }

  /** Hook invoked after the clear ("x") button empties the field. */
  protected void onFieldCleared() {
  }

  /** Hook invoked on focus loss; by default commits the current text to history. */
  protected void onFocusLost() {
    addCurrentTextToHistory();
  }

  /** Hook invoked on focus gain; no-op by default. */
  protected void onFocusGained() {
  }

  public void addDocumentListener(DocumentListener listener) {
    getTextEditor().getDocument().addDocumentListener(listener);
  }

  public void removeDocumentListener(DocumentListener listener) {
    getTextEditor().getDocument().removeDocumentListener(listener);
  }

  public void addKeyboardListener(final KeyListener listener) {
    getTextEditor().addKeyListener(listener);
  }

  /**
   * Sets the maximum number of history entries kept.
   *
   * @throws IllegalArgumentException if {@code historySize} is not positive
   */
  public void setHistorySize(int historySize) {
    if (historySize <= 0) throw new IllegalArgumentException("history size must be a positive number");
    myHistorySize = historySize;
  }

  public void setHistory(List<String> aHistory) {
    myModel.setItems(aHistory);
  }

  /** Returns a snapshot of the visible history, most recent first. */
  public List<String> getHistory() {
    final int itemsCount = myModel.getSize();
    final List<String> history = new ArrayList<>(itemsCount);
    for (int i = 0; i < itemsCount; i++) {
      history.add(myModel.getElementAt(i));
    }
    return history;
  }

  public void setText(String aText) {
    getTextEditor().setText(aText);
  }

  public String getText() {
    return getTextEditor().getText();
  }

  @Override
  public void removeNotify() {
    super.removeNotify();
    // Never leave the popup dangling after the field is removed from the UI.
    hidePopup();
  }

  /** Adds the current text to history and persists it when a property name is configured. */
  public void addCurrentTextToHistory() {
    if (myModel.addElement(getText()) && myHistoryPropertyName != null) {
      PropertiesComponent.getInstance().setValue(myHistoryPropertyName, StringUtil.join(getHistory(), "\n"));
    }
  }

  /** Hook invoked when a history item is chosen; no-op by default. */
  protected void historyItemChosen(String item) {
  }

  public void selectText() {
    getTextEditor().selectAll();
  }

  public JBTextField getTextEditor() {
    return myTextField;
  }

  @Override
  public boolean requestFocusInWindow() {
    return myTextField.requestFocusInWindow();
  }

  @Override
  public void requestFocus() {
    IdeFocusManager.getGlobalInstance()
      .doWhenFocusSettlesDown(() -> IdeFocusManager.getGlobalInstance().requestFocus(getTextEditor(), true));
  }

  /** Sets the persistence key and reloads history from it (see {@link #reset()}). */
  protected void setHistoryPropertyName(String historyPropertyName) {
    myHistoryPropertyName = historyPropertyName;
    myTextField.putClientProperty("JTextField.Search.InplaceHistory", myHistoryPropertyName);
    reset();
  }

  /** Reloads history from PropertiesComponent (newline-separated) and clears the text. */
  public void reset() {
    if (myHistoryPropertyName == null) return;
    final PropertiesComponent propertiesComponent = PropertiesComponent.getInstance();
    final String history = propertiesComponent.getValue(myHistoryPropertyName);
    if (history != null) {
      final String[] items = history.split("\n");
      ArrayList<String> result = new ArrayList<>();
      for (String item : items) {
        // Skip blank lines produced by the join/split round-trip.
        if (item != null && item.length() > 0) {
          result.add(item);
        }
      }
      setHistory(result);
    }
    else {
      setEmptyHistory();
    }
    setSelectedItem("");
  }

  /** Hook invoked when no persisted history exists; no-op by default. */
  protected void setEmptyHistory() {
  }

  /**
   * List model backing the history popup. Keeps the full list internally but
   * exposes at most {@code myHistorySize} entries, most recent first.
   */
  public class MyModel extends AbstractListModel<String> {
    private List<String> myFullList = new ArrayList<>();
    private String mySelectedItem;

    @Override
    public String getElementAt(int index) {
      return myFullList.get(index);
    }

    @Override
    public int getSize() {
      return Math.min(myHistorySize, myFullList.size());
    }

    /**
     * Moves {@code item} to the top of the history (deduplicated
     * case-insensitively), trimming the list to the configured size.
     *
     * @return true if the model changed (blank items and items already on top
     *         are ignored)
     */
    public boolean addElement(String item) {
      final String newItem = item.trim();
      if (newItem.isEmpty()) {
        return false;
      }

      final int length = myFullList.size();
      int index = -1;
      for (int i = 0; i < length; i++) {
        if (StringUtil.equalsIgnoreCase(myFullList.get(i), newItem)) {
          index = i;
          break;
        }
      }
      if (index == 0) {
        // item is already at the top of the list
        return false;
      }
      if (index > 0) {
        // move item to top of the list
        myFullList.remove(index);
      }
      else if (myFullList.size() >= myHistorySize && myFullList.size() > 0) {
        // trim list
        myFullList.remove(myFullList.size() - 1);
      }
      insertElementAt(newItem, 0);
      return true;
    }

    public void insertElementAt(String item, int index) {
      myFullList.add(index, item);
      fireContentsChanged();
    }

    public String getSelectedItem() {
      return mySelectedItem;
    }

    public void setSelectedItem(String anItem) {
      mySelectedItem = anItem;
    }

    public void fireContentsChanged() {
      fireContentsChanged(this, -1, -1);
      // Keep a visible popup in sync with the model.
      updatePopup();
    }

    public void setItems(List<String> aList) {
      myFullList = new ArrayList<>(aList);
      fireContentsChanged();
    }
  }

  protected void hidePopup() {
    if (myPopup != null) {
      myPopup.cancel();
      myPopup = null;
    }
  }

  /** Returns the callback run when the user picks an item in the history popup. */
  protected Runnable createItemChosenCallback(final JList list) {
    return () -> {
      final String value = (String)list.getSelectedValue();
      getTextEditor().setText(value != null ? value : "");
      addCurrentTextToHistory();
    };
  }

  protected void showPopup() {
    addCurrentTextToHistory();
    if (myPopup != null && myPopup.isVisible()) return;
    if (historyPopupEnabled) {
      doShowPopup();
    }
  }

  // Recreates a visible popup so it reflects model changes.
  private void updatePopup() {
    if (myPopup != null && myPopup.isVisible()) {
      hidePopup();
      doShowPopup();
    }
  }

  private void doShowPopup() {
    // Guard against headless/unattached states where a popup cannot be shown.
    if (ApplicationManager.getApplication() != null &&
        JBPopupFactory.getInstance() != null &&
        isShowing()) {
      final JList<String> list = new JBList<>(myModel);
      final Runnable chooseRunnable = createItemChosenCallback(list);
      myPopup = JBPopupFactory.getInstance().createListPopupBuilder(list)
        .setMovable(false)
        .setRequestFocus(true)
        .setItemChoosenCallback(chooseRunnable).createPopup();
      myPopup.showUnderneathOf(getPopupLocationComponent());
    }
  }

  /** Component the history popup is anchored under; the whole field by default. */
  protected Component getPopupLocationComponent() {
    return this;
  }

  public void setSelectedItem(final String s) {
    getTextEditor().setText(s);
  }

  /** Returns the index of the current text within the full history, or -1. */
  public int getSelectedIndex() {
    return myModel.myFullList.indexOf(getText());
  }

  /** Text field subclass that widens {@code processKeyEvent} to public for event forwarding. */
  protected static class TextFieldWithProcessing extends JBTextField {
    @Override
    public void processKeyEvent(KeyEvent e) {
      super.processKeyEvent(e);
    }
  }

  /** Forwards a key event directly to the underlying text field. */
  protected final void keyEventToTextField(KeyEvent e) {
    myTextField.processKeyEvent(e);
  }

  /**
   * Pre-filters key events before the text field sees them.
   *
   * @return true if the event was consumed (the history popup was shown)
   */
  protected boolean preprocessEventForTextField(KeyEvent e) {
    if (SHOW_HISTORY_KEYSTROKE.equals(KeyStroke.getKeyStrokeForEvent(e))) {
      showPopup();
      return true;
    }
    return false;
  }

  /**
   * @deprecated unused
   */
  @Deprecated(forRemoval = true)
  public void setSearchIcon(final Icon icon) {
  }

  /** Action that moves focus into the search field exposed via {@link #KEY}. */
  public static final class FindAction extends DumbAwareAction {
    @Override
    public void actionPerformed(@NotNull AnActionEvent event) {
      SearchTextField search = event.getData(KEY);
      if (search != null) {
        search.selectText();
        search.requestFocus();
      }
    }

    @Override
    public void update(@NotNull AnActionEvent e) {
      e.getPresentation().setEnabledAndVisible(e.getData(KEY) != null);
    }
  }
}
| apache-2.0 |
cloudera/cdk | cdk-morphlines/cdk-morphlines-core/src/test/java/com/cloudera/cdk/morphline/api/SimpleHoconConfigTest.java | 4927 | /*
* Copyright 2013 Cloudera Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.cloudera.cdk.morphline.api;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigException;
import com.typesafe.config.ConfigFactory;
import com.typesafe.config.ConfigObject;
import com.typesafe.config.ConfigValueFactory;
/** See https://github.com/typesafehub/config */
/**
 * Exercises HOCON parsing through the Typesafe Config library.
 * See https://github.com/typesafehub/config
 *
 * <p>Cleanups over the previous revision: raw {@code Iterator}/{@code HashMap}
 * types are parameterized, the deliberately-ignored exception is documented,
 * and {@code testCacheBuilder} now actually asserts the caching behaviour it
 * was written to demonstrate (it previously held two unused locals and
 * asserted nothing).
 */
public class SimpleHoconConfigTest extends Assert {

  private static final String TIKA_CONFIG_LOCATION = "tika.config";

  @Test
  @Ignore
  public void testBasic() {
    Config conf = ConfigFactory.load("test-application").getConfig(getClass().getPackage().getName() + ".test");
    assertEquals(conf.getString("foo.bar"), "1234");
    assertEquals(conf.getInt("foo.bar"), 1234);

    Config subConfig = conf.getConfig("foo");
    assertNotNull(subConfig);
    assertEquals(subConfig.getString("bar"), "1234");

    assertFalse(conf.hasPath("missing.foox.barx"));
    try {
      conf.getString("missing.foox.barx");
      fail("Failed to detect missing param");
    } catch (ConfigException.Missing expected) {
      // expected: the path does not exist
    }

    // Config lists preserve declaration order.
    Iterator<String> userNames = Arrays.asList("nadja", "basti").iterator();
    Iterator<String> passwords = Arrays.asList("nchangeit", "bchangeit").iterator();
    for (Config user : conf.getConfigList("users")) {
      assertEquals(user.getString("userName"), userNames.next());
      assertEquals(user.getString("password"), passwords.next());
    }
    assertFalse(userNames.hasNext());
    assertFalse(passwords.hasNext());

    assertEquals(conf.getStringList("files.paths"), Arrays.asList("dir/file1.log", "dir/file2.txt"));

    Iterator<String> schemas = Arrays.asList("schema1.json", "schema2.json").iterator();
    Iterator<String> globs = Arrays.asList("*.log*", "*.txt*").iterator();
    for (Config fileMapping : conf.getConfigList("files.fileMappings")) {
      assertEquals(fileMapping.getString("schema"), schemas.next());
      assertEquals(fileMapping.getString("glob"), globs.next());
    }
    assertFalse(schemas.hasNext());
    assertFalse(globs.hasNext());
  }

  /** Tests path-based access on a Config built from a plain Map. */
  @Test
  public void testParseMap() {
    final Map<String, String> map = new HashMap<>();
    map.put(TIKA_CONFIG_LOCATION, "src/test/resources/tika-config.xml");
    map.put("collection1.testcoll.solr.home", "target/test-classes/solr/collection1");

    Config config = ConfigFactory.parseMap(map);
    String filePath = config.getString(TIKA_CONFIG_LOCATION);
    assertEquals(map.get(TIKA_CONFIG_LOCATION), filePath);
    Config subConfig = config.getConfig("collection1").getConfig("testcoll");
    assertEquals("target/test-classes/solr/collection1", subConfig.getString("solr.home"));
  }

  /** Tests key-based access on a ConfigObject built from a plain Map. */
  @Test
  public void testFromMap() {
    final Map<String, String> map = new HashMap<>();
    map.put(TIKA_CONFIG_LOCATION, "src/test/resources/tika-config.xml");
    String key = "collection1.testcoll.solr.home";
    map.put(key, "target/test-classes/solr/collection1");

    ConfigObject config = ConfigValueFactory.fromMap(map);
    String filePath = config.get(TIKA_CONFIG_LOCATION).unwrapped().toString();
    assertEquals(map.get(TIKA_CONFIG_LOCATION), filePath);
    assertEquals(map.get(key), config.get(key).unwrapped().toString());
  }

  /** Verifies that a LoadingCache loads at most once per key. */
  @Test
  public void testCacheBuilder() throws ExecutionException {
    LoadingCache<String, Matcher> cache = CacheBuilder.newBuilder()
        .maximumSize(10)
        .build(
            new CacheLoader<String, Matcher>() {
              public Matcher load(String key) {
                return Pattern.compile(key).matcher("");
              }
            });
    Matcher first = cache.get(".*");
    Matcher second = cache.get(".*");
    // Both lookups must hit the same cached Matcher instance.
    assertSame(first, second);
  }
}
| apache-2.0 |
Sage-Bionetworks/Synapse-Repository-Services | lib/jdomodels/src/main/java/org/sagebionetworks/repo/model/dbo/dao/dataaccess/RequestDAO.java | 1319 | package org.sagebionetworks.repo.model.dbo.dao.dataaccess;
import org.sagebionetworks.repo.model.dataaccess.Request;
import org.sagebionetworks.repo.model.dataaccess.RequestInterface;
import org.sagebionetworks.repo.web.NotFoundException;
public interface RequestDAO {

	/**
	 * Create a new Request.
	 *
	 * @param toCreate the request to persist
	 * @return the persisted request
	 */
	public Request create(Request toCreate);

	/**
	 * Retrieve the current request that the user created (or owns) for the
	 * given accessRequirementId.
	 *
	 * @param accessRequirementId id of the access requirement the request targets
	 * @param userId id of the requesting user
	 * @return the user's current request
	 * @throws NotFoundException if no such request exists
	 */
	public RequestInterface getUserOwnCurrentRequest(String accessRequirementId, String userId) throws NotFoundException;

	/**
	 * Update an existing Request.
	 *
	 * @param toUpdate the request with updated fields
	 * @return the updated request
	 * @throws NotFoundException if the request does not exist
	 */
	public RequestInterface update(RequestInterface toUpdate) throws NotFoundException;

	/**
	 * Delete the request with the given id. Used for tests only.
	 *
	 * @param id id of the request to remove
	 */
	void delete(String id);

	/**
	 * Retrieve the current Request for update.
	 *
	 * <p>Side effect: this call acquires a lock on the returned object, so it
	 * must only be used inside a transaction.
	 *
	 * @param id id of the request
	 * @return the locked request
	 */
	public RequestInterface getForUpdate(String id);

	/**
	 * Retrieve the current Request.
	 *
	 * @param id id of the request
	 * @return the request
	 */
	public RequestInterface get(String id);

	/**
	 * Remove all requests. For testing only.
	 */
	void truncateAll();
}
| apache-2.0 |
akshatknsl/keywhiz | server/src/test/java/keywhiz/service/daos/GroupDAOTest.java | 3709 | /*
* Copyright (C) 2015 Square, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package keywhiz.service.daos;
import com.google.inject.Guice;
import com.google.inject.testing.fieldbinder.Bind;
import com.google.inject.testing.fieldbinder.BoundFieldModule;
import java.util.List;
import java.util.Optional;
import javax.inject.Inject;
import keywhiz.TestDBRule;
import keywhiz.api.model.Group;
import keywhiz.service.config.Readonly;
import keywhiz.service.daos.GroupDAO.GroupDAOFactory;
import org.jooq.DSLContext;
import org.jooq.exception.DataAccessException;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import static java.util.stream.Collectors.toList;
import static keywhiz.jooq.tables.Groups.GROUPS;
import static org.assertj.core.api.Assertions.assertThat;
/**
 * Unit tests for {@link GroupDAO}. Each test runs against a fresh test database
 * seeded with two groups inserted directly through jOOQ.
 */
public class GroupDAOTest {
  @Rule public final TestDBRule testDBRule = new TestDBRule();

  @Bind DSLContext jooqContext;
  @Bind @Readonly DSLContext jooqReadonlyContext;
  @Inject GroupDAOFactory groupDAOFactory;

  Group group1, group2;
  GroupDAO groupDAO;

  @Before
  public void setUp() throws Exception {
    // The read-write and read-only bindings both point at the same test database.
    jooqContext = jooqReadonlyContext = testDBRule.jooqContext();
    Guice.createInjector(BoundFieldModule.of(this)).injectMembers(this);
    groupDAO = groupDAOFactory.readwrite();

    // Seed two rows directly through jOOQ, then read them back through the DAO
    // so the fixtures are real Group objects as the DAO would return them.
    testDBRule.jooqContext().insertInto(GROUPS,
        GROUPS.NAME, GROUPS.DESCRIPTION, GROUPS.CREATEDBY, GROUPS.UPDATEDBY)
        .values("group1", "desc1", "creator1", "updater1")
        .values("group2", "desc2", "creator2", "updater2")
        .execute();

    group1 = groupDAO.getGroup("group1").get();
    group2 = groupDAO.getGroup("group2").get();
  }

  @Test
  public void createGroup() {
    int initialCount = groupCount();
    groupDAO.createGroup("newGroup", "creator3", Optional.empty());
    assertThat(groupCount()).isEqualTo(initialCount + 1);

    List<String> groupNames = groupDAO.getGroups().stream()
        .map(Group::getName)
        .collect(toList());
    assertThat(groupNames).contains("newGroup");
  }

  @Test
  public void deleteGroup() {
    int initialCount = groupCount();
    groupDAO.deleteGroup(group1);
    assertThat(groupCount()).isEqualTo(initialCount - 1);
    assertThat(groupDAO.getGroups()).containsOnly(group2);
  }

  @Test
  public void getGroup() {
    // The lookup itself happens in setUp(); here we verify the mapped fields.
    assertThat(group1.getName()).isEqualTo("group1");
    assertThat(group1.getDescription()).isEqualTo("desc1");
    assertThat(group1.getCreatedBy()).isEqualTo("creator1");
    assertThat(group1.getUpdatedBy()).isEqualTo("updater1");
  }

  @Test
  public void getGroupById() {
    assertThat(groupDAO.getGroupById(group1.getId())).contains(group1);
  }

  @Test
  public void getNonExistentGroup() {
    // Both lookup flavors should come back empty for unknown names/ids.
    assertThat(groupDAO.getGroup("non-existent")).isEmpty();
    assertThat(groupDAO.getGroupById(-1234)).isEmpty();
  }

  @Test
  public void getGroups() {
    assertThat(groupDAO.getGroups()).containsOnly(group1, group2);
  }

  @Test(expected = DataAccessException.class)
  public void willNotCreateDuplicateGroup() throws Exception {
    // "group1" already exists, so the unique constraint must reject this insert.
    groupDAO.createGroup("group1", "creator1", Optional.empty());
  }

  /** Number of rows currently in the groups table. */
  private int groupCount() {
    return jooqContext.fetchCount(GROUPS);
  }
}
| apache-2.0 |
apache/sis | core/sis-metadata/src/test/java/org/apache/sis/metadata/TypeMapTest.java | 6197 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sis.metadata;
import java.util.Map;
import java.util.Collection;
import java.util.Date;
import org.opengis.metadata.Identifier;
import org.opengis.metadata.citation.Citation;
import org.opengis.metadata.citation.CitationDate;
import org.opengis.metadata.citation.OnlineResource;
import org.opengis.metadata.citation.PresentationForm;
import org.opengis.metadata.citation.ResponsibleParty;
import org.opengis.metadata.citation.Series;
import org.opengis.metadata.extent.GeographicExtent;
import org.opengis.metadata.extent.GeographicDescription;
import org.opengis.metadata.identification.BrowseGraphic;
import org.opengis.util.InternationalString;
import org.apache.sis.metadata.iso.citation.DefaultCitation;
import org.apache.sis.metadata.iso.extent.AbstractGeographicExtent;
import org.apache.sis.metadata.iso.extent.DefaultGeographicDescription;
import org.apache.sis.test.DependsOn;
import org.apache.sis.test.TestCase;
import org.junit.Test;
import static org.junit.Assert.*;
import static java.util.AbstractMap.SimpleEntry;
/**
 * Tests the {@link TypeMap} class on instances created by
 * {@link MetadataStandard#asTypeMap(Class, KeyNamePolicy, TypeValuePolicy)}.
 * Unless otherwise specified, all tests use the {@link MetadataStandard#ISO_19115} constant.
 *
 * @author Martin Desruisseaux (Geomatys)
 * @version 1.0
 * @since 0.3
 * @module
 */
@DependsOn(PropertyAccessorTest.class)
public final strictfp class TypeMapTest extends TestCase {
    /**
     * Tests {@code TypeMap.entrySet()} for an exact match (including iteration order).
     * The properties used in this test are listed in {@link PropertyAccessorTest#testConstructor()}.
     *
     * @see PropertyAccessorTest#testConstructor()
     */
    @Test
    public void testEntrySet() {
        final Map<String,Class<?>> types = MetadataStandard.ISO_19115.asTypeMap(
                Citation.class, KeyNamePolicy.UML_IDENTIFIER, TypeValuePolicy.ELEMENT_TYPE);
        final Object[] expected = {
            new SimpleEntry<>("title",                 InternationalString.class),
            new SimpleEntry<>("alternateTitle",        InternationalString.class),
            new SimpleEntry<>("date",                  CitationDate.class),
            new SimpleEntry<>("edition",               InternationalString.class),
            new SimpleEntry<>("editionDate",           Date.class),
            new SimpleEntry<>("identifier",            Identifier.class),
            new SimpleEntry<>("citedResponsibleParty", ResponsibleParty.class),
            new SimpleEntry<>("presentationForm",      PresentationForm.class),
            new SimpleEntry<>("series",                Series.class),
            new SimpleEntry<>("otherCitationDetails",  InternationalString.class),
//          new SimpleEntry<>("collectiveTitle",       InternationalString.class),    — deprecated as of ISO 19115:2014
            new SimpleEntry<>("ISBN",                  String.class),
            new SimpleEntry<>("ISSN",                  String.class),
            new SimpleEntry<>("onlineResource",        OnlineResource.class),
            new SimpleEntry<>("graphic",               BrowseGraphic.class)
        };
        assertArrayEquals(expected, types.entrySet().toArray());
        assertEquals(InternationalString.class, types.get("alternateTitle"));
        assertNull("Shall not exists.", types.get("dummy"));
    }

    /**
     * Tests {@link TypeMap#get(Object)} on a well known metadata type for various {@link TypeValuePolicy}.
     */
    @Test
    public void testGet() {
        assertCitationTypes(TypeValuePolicy.PROPERTY_TYPE,       InternationalString.class, Collection.class);
        assertCitationTypes(TypeValuePolicy.ELEMENT_TYPE,        InternationalString.class, InternationalString.class);
        assertCitationTypes(TypeValuePolicy.DECLARING_INTERFACE, Citation.class,            Citation.class);
        assertCitationTypes(TypeValuePolicy.DECLARING_CLASS,     DefaultCitation.class,     DefaultCitation.class);
        /*
         * Tests declaring classes/interfaces again, now with metadata having a class hierarchy.
         */
        Map<String,Class<?>> types = MetadataStandard.ISO_19115.asTypeMap(
                DefaultGeographicDescription.class, KeyNamePolicy.JAVABEANS_PROPERTY, TypeValuePolicy.DECLARING_INTERFACE);
        assertEquals(GeographicDescription.class, types.get("geographicIdentifier"));
        assertEquals(GeographicExtent.class,      types.get("inclusion"));

        types = MetadataStandard.ISO_19115.asTypeMap(
                DefaultGeographicDescription.class, KeyNamePolicy.JAVABEANS_PROPERTY, TypeValuePolicy.DECLARING_CLASS);
        assertEquals(DefaultGeographicDescription.class, types.get("geographicIdentifier"));
        assertEquals(AbstractGeographicExtent.class,     types.get("inclusion"));
    }

    /**
     * Asserts that the type map computed for {@link DefaultCitation} under the given policy
     * associates the expected types to the {@code "title"} and {@code "alternateTitles"} properties.
     */
    private static void assertCitationTypes(final TypeValuePolicy policy,
            final Class<?> titleType, final Class<?> alternateTitlesType)
    {
        final Map<String,Class<?>> types = MetadataStandard.ISO_19115.asTypeMap(
                DefaultCitation.class, KeyNamePolicy.JAVABEANS_PROPERTY, policy);
        assertEquals(titleType,           types.get("title"));
        assertEquals(alternateTitlesType, types.get("alternateTitles"));
    }
}
| apache-2.0 |
youngwookim/presto | presto-kafka/src/main/java/io/prestosql/plugin/kafka/KafkaRecordSetProvider.java | 3707 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.plugin.kafka;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import io.prestosql.decoder.DispatchingRowDecoderFactory;
import io.prestosql.decoder.RowDecoder;
import io.prestosql.spi.connector.ColumnHandle;
import io.prestosql.spi.connector.ConnectorRecordSetProvider;
import io.prestosql.spi.connector.ConnectorSession;
import io.prestosql.spi.connector.ConnectorSplit;
import io.prestosql.spi.connector.ConnectorTableHandle;
import io.prestosql.spi.connector.ConnectorTransactionHandle;
import io.prestosql.spi.connector.RecordSet;
import javax.inject.Inject;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import static com.google.common.collect.ImmutableSet.toImmutableSet;
import static io.prestosql.plugin.kafka.KafkaHandleResolver.convertSplit;
import static java.util.Objects.requireNonNull;
/**
 * Factory for Kafka specific {@link RecordSet} instances.
 */
public class KafkaRecordSetProvider
        implements ConnectorRecordSetProvider
{
    // Both collaborators are required and never reassigned after construction;
    // decoderFactory was previously non-final, inconsistent with consumerManager.
    private final DispatchingRowDecoderFactory decoderFactory;
    private final KafkaSimpleConsumerManager consumerManager;

    /**
     * @param decoderFactory creates {@link RowDecoder}s for the configured key/message data formats
     * @param consumerManager supplies Kafka consumers used to read the split's data
     */
    @Inject
    public KafkaRecordSetProvider(DispatchingRowDecoderFactory decoderFactory, KafkaSimpleConsumerManager consumerManager)
    {
        this.decoderFactory = requireNonNull(decoderFactory, "decoderFactory is null");
        this.consumerManager = requireNonNull(consumerManager, "consumerManager is null");
    }

    /**
     * Builds a {@link KafkaRecordSet} for the given split, wiring up one decoder for the
     * message key and one for the message value. Internal columns are never decoded;
     * the remaining columns are routed to the key or message decoder according to
     * {@link KafkaColumnHandle#isKeyDecoder()}.
     */
    @Override
    public RecordSet getRecordSet(ConnectorTransactionHandle transaction, ConnectorSession session, ConnectorSplit split, ConnectorTableHandle table, List<? extends ColumnHandle> columns)
    {
        KafkaSplit kafkaSplit = convertSplit(split);

        List<KafkaColumnHandle> kafkaColumns = columns.stream()
                .map(KafkaHandleResolver::convertColumnHandle)
                .collect(ImmutableList.toImmutableList());

        RowDecoder keyDecoder = decoderFactory.create(
                kafkaSplit.getKeyDataFormat(),
                getDecoderParameters(kafkaSplit.getKeyDataSchemaContents()),
                kafkaColumns.stream()
                        .filter(col -> !col.isInternal())
                        .filter(KafkaColumnHandle::isKeyDecoder)
                        .collect(toImmutableSet()));

        RowDecoder messageDecoder = decoderFactory.create(
                kafkaSplit.getMessageDataFormat(),
                getDecoderParameters(kafkaSplit.getMessageDataSchemaContents()),
                kafkaColumns.stream()
                        .filter(col -> !col.isInternal())
                        .filter(col -> !col.isKeyDecoder())
                        .collect(toImmutableSet()));

        return new KafkaRecordSet(kafkaSplit, consumerManager, kafkaColumns, keyDecoder, messageDecoder);
    }

    /**
     * Wraps an optional data schema into the decoder parameter map; returns an empty
     * map when no schema is configured for the split.
     */
    private Map<String, String> getDecoderParameters(Optional<String> dataSchema)
    {
        ImmutableMap.Builder<String, String> parameters = ImmutableMap.builder();
        dataSchema.ifPresent(schema -> parameters.put("dataSchema", schema));
        return parameters.build();
    }
}
| apache-2.0 |
internetisalie/lua-for-idea | src/main/java/com/sylvanaar/idea/Lua/editor/inspections/InspectionProvider.java | 2436 | /*
* Copyright 2010 Jon S Akhtar (Sylvanaar)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sylvanaar.idea.Lua.editor.inspections;
import com.intellij.codeInspection.InspectionToolProvider;
import com.sylvanaar.idea.Lua.editor.inspections.bugs.*;
import com.sylvanaar.idea.Lua.editor.inspections.metrics.LuaOverlyComplexMethodInspection;
import com.sylvanaar.idea.Lua.editor.inspections.metrics.LuaOverlyLongMethodInspection;
import com.sylvanaar.idea.Lua.editor.inspections.performance.RedundantInitializationInspection;
import com.sylvanaar.idea.Lua.editor.inspections.performance.StringConcatenationInLoopsInspection;
import com.sylvanaar.idea.Lua.editor.inspections.unassignedVariable.UnassignedVariableAccessInspection;
import com.sylvanaar.idea.Lua.editor.inspections.usage.UnusedDefInspection;
import com.sylvanaar.idea.Lua.editor.inspections.validity.LuaUnreachableStatementInspection;
/**
 * Registers all Lua inspections with the IDE.
 * <p>
 * Returns the concrete inspection classes grouped below by the package each one
 * comes from (bugs, metrics, performance, validity, unassigned-variable and
 * usage analyses — see the imports at the top of this file).
 * <p>
 * Created by IntelliJ IDEA.
 * User: Jon S Akhtar
 * Date: Jun 12, 2010
 * Time: 7:25:47 AM
 */
public class InspectionProvider implements InspectionToolProvider {
    public Class[] getInspectionClasses() {
        return new Class[] {
                // bugs package: likely programming errors
                ParameterSelfInspection.class,
                GlobalSelfInspection.class,
                UnbalancedAssignmentInspection.class,
                LuaDivideByZeroInspection.class,
                // metrics package: method size/complexity thresholds
                LuaOverlyComplexMethodInspection.class,
                LuaOverlyLongMethodInspection.class,
                // bugs package: Lua arrays are conventionally 1-based
                ArrayElementZeroInspection.class,
                // validity package
                LuaUnreachableStatementInspection.class,
                // performance package
                StringConcatenationInLoopsInspection.class,
                RedundantInitializationInspection.class,
                // bugs package
                GlobalCreationOutsideOfMainChunk.class,
                // unassignedVariable package: reads before any assignment
                UnassignedVariableAccessInspection.class,
                // usage package: definitions that are never used
                UnusedDefInspection.class
        };
    }
}
| apache-2.0 |
Gaduo/hapi-fhir | hapi-fhir-structures-dstu3/src/main/java/org/hl7/fhir/dstu3/model/codesystems/V3EntityDeterminerEnumFactory.java | 3054 | package org.hl7.fhir.dstu3.model.codesystems;
/*
Copyright (c) 2011+, HL7, Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of HL7 nor the names of its contributors may be used to
endorse or promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
*/
// Generated on Tue, Dec 6, 2016 09:42-0500 for FHIR v1.8.0
import org.hl7.fhir.dstu3.model.EnumFactory;
/**
 * Converts between {@link V3EntityDeterminer} enum constants and their FHIR code strings.
 */
public class V3EntityDeterminerEnumFactory implements EnumFactory<V3EntityDeterminer> {

  public V3EntityDeterminer fromCode(String codeString) throws IllegalArgumentException {
    // Absent or empty codes map to no value rather than an error.
    if (codeString == null || "".equals(codeString))
      return null;
    switch (codeString) {
      case "INSTANCE":
        return V3EntityDeterminer.INSTANCE;
      case "GROUP":
        return V3EntityDeterminer.GROUP;
      case "KIND":
        return V3EntityDeterminer.KIND;
      case "GROUPKIND":
        return V3EntityDeterminer.GROUPKIND;
      case "QUANTIFIED_KIND":
        return V3EntityDeterminer.QUANTIFIEDKIND;
      default:
        throw new IllegalArgumentException("Unknown V3EntityDeterminer code '"+codeString+"'");
    }
  }

  public String toCode(V3EntityDeterminer code) {
    // Unknown or absent constants render as "?", matching the generated-code convention.
    if (code == null)
      return "?";
    switch (code) {
      case INSTANCE:
        return "INSTANCE";
      case GROUP:
        return "GROUP";
      case KIND:
        return "KIND";
      case GROUPKIND:
        return "GROUPKIND";
      case QUANTIFIEDKIND:
        return "QUANTIFIED_KIND";
      default:
        return "?";
    }
  }

  public String toSystem(V3EntityDeterminer code) {
    return code.getSystem();
  }
}
| apache-2.0 |