repo_name
stringlengths
5
108
path
stringlengths
6
333
size
stringlengths
1
6
content
stringlengths
4
977k
license
stringclasses
15 values
seoj/herd
herd-code/herd-service/src/test/java/org/finra/herd/service/activiti/task/GetBusinessObjectDataTest.java
36974
/* * Copyright 2015 herd contributors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.finra.herd.service.activiti.task; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; import com.fasterxml.jackson.databind.ObjectMapper; import org.activiti.bpmn.model.FieldExtension; import org.activiti.engine.history.HistoricProcessInstance; import org.junit.Test; import org.finra.herd.model.api.xml.BusinessObjectData; import org.finra.herd.model.api.xml.Job; import org.finra.herd.model.api.xml.Parameter; import org.finra.herd.model.jpa.BusinessObjectDataEntity; import org.finra.herd.model.jpa.BusinessObjectDataStatusEntity; import org.finra.herd.model.jpa.BusinessObjectDefinitionEntity; import org.finra.herd.model.jpa.BusinessObjectFormatEntity; import org.finra.herd.model.jpa.DataProviderEntity; import org.finra.herd.model.jpa.FileTypeEntity; import org.finra.herd.model.jpa.NamespaceEntity; import org.finra.herd.model.jpa.StorageEntity; import org.finra.herd.model.jpa.StorageFileEntity; import org.finra.herd.model.jpa.StoragePlatformEntity; import org.finra.herd.model.jpa.StorageUnitEntity; import org.finra.herd.model.jpa.StorageUnitStatusEntity; import org.finra.herd.service.activiti.ActivitiRuntimeHelper; /** * <p> Test suite for Get Business Object Data Activiti wrapper. 
</p> * <p/> * Test plan: <ol> <li>Insert test business object data into database, where the inserted data is configurable per test case.</li> <li>Execute Activiti job * using configurable input parameters</li> <li>Validate response by converting JSON into response object upon success, or verifying ERROR status on * exceptions.</li> </ol> */ public class GetBusinessObjectDataTest extends HerdActivitiServiceTaskTest { /** * The delimiter character to use to construct sub-partition values. */ private static final char DELIMITER = '|'; /** * The escape pattern for sub-partition values delimiter. This should be used as replacement when using .replaceAll() */ private static final String DELIMITER_ESCAPE = Matcher.quoteReplacement("\\" + DELIMITER); /** * The search regex for sub-partition value delimiter. This should be used as search pattern when using .replaceAll() */ private static final String PATTERN_DELIMITER = Pattern.quote(String.valueOf(DELIMITER)); /** * The Activiti variable name suffix for JSON response. */ private static final String VARIABLE_JSON_RESPONSE = "jsonResponse"; /** * The implementation of the Activiti wrapper we are testing. 
*/ private static final String IMPLEMENTATION = GetBusinessObjectData.class.getCanonicalName(); @Test public void test_WithSubPartitions_NoFormatVersion_NoDataVersion() throws Exception { // Information that will be used for both set up data and input data String dataProviderName = randomString(); String namespace = randomString(); String businessObjectDefinitionName = randomString(); String fileTypeCode = randomString(); Integer businessObjectFormatVersion = 0; String partitionKey = randomString(); String businessObjectFormatUsage = randomString(); String[] partitionValues = {randomString(), randomString(), randomString(), randomString(), randomString()}; Integer businessObjectDataVersion = 0; // Set up database setupDatabase(namespace, dataProviderName, businessObjectDefinitionName, fileTypeCode, businessObjectFormatUsage, businessObjectFormatVersion, businessObjectDataVersion, partitionKey, partitionValues); String subPartitionValues = buildDelimitedSubPartitionValues(partitionValues); String partitionValue = partitionValues[0]; Map<String, Object> variables = executeJob(namespace, businessObjectDefinitionName, businessObjectFormatUsage, fileTypeCode, partitionKey, partitionValue, subPartitionValues, null, null); // Validate status String serviceTaskStatus = (String) variables.get(getServiceTaskVariableName(ActivitiRuntimeHelper.VARIABLE_STATUS)); assertEquals(ActivitiRuntimeHelper.TASK_STATUS_SUCCESS, serviceTaskStatus); // Validate JSON response String jsonResponse = (String) variables.get(getServiceTaskVariableName(VARIABLE_JSON_RESPONSE)); ObjectMapper om = new ObjectMapper(); BusinessObjectData businessObjectData = om.readValue(jsonResponse.getBytes(), BusinessObjectData.class); assertEquals(namespace, businessObjectData.getNamespace()); assertEquals(businessObjectDefinitionName, businessObjectData.getBusinessObjectDefinitionName()); assertEquals(businessObjectFormatUsage, businessObjectData.getBusinessObjectFormatUsage()); assertEquals(fileTypeCode, 
businessObjectData.getBusinessObjectFormatFileType()); assertEquals(businessObjectFormatVersion.intValue(), businessObjectData.getBusinessObjectFormatVersion()); assertEquals(partitionKey, businessObjectData.getPartitionKey()); assertEquals(partitionValue, businessObjectData.getPartitionValue()); assertEquals(4, businessObjectData.getSubPartitionValues().size()); assertEquals(partitionValues[1], businessObjectData.getSubPartitionValues().get(0)); assertEquals(partitionValues[2], businessObjectData.getSubPartitionValues().get(1)); assertEquals(partitionValues[3], businessObjectData.getSubPartitionValues().get(2)); assertEquals(partitionValues[4], businessObjectData.getSubPartitionValues().get(3)); assertEquals(businessObjectDataVersion.intValue(), businessObjectData.getVersion()); assertEquals(true, businessObjectData.isLatestVersion()); assertEquals(1, businessObjectData.getStorageUnits().size()); assertEquals(0, businessObjectData.getAttributes().size()); assertEquals(0, businessObjectData.getBusinessObjectDataParents().size()); assertEquals(0, businessObjectData.getBusinessObjectDataChildren().size()); } @Test public void test_WithSubPartitions_NoFormatVersion_NoDataVersion_WithSubPartitionDelimiterEscapeValue() throws Exception { // Information that will be used for both set up data and input data String dataProviderName = randomString(); String namespace = randomString(); String businessObjectDefinitionName = randomString(); String fileTypeCode = randomString(); Integer businessObjectFormatVersion = 0; String partitionKey = randomString(); String businessObjectFormatUsage = randomString(); String[] partitionValues = {randomString() + DELIMITER, randomString() + DELIMITER, randomString() + DELIMITER, randomString() + DELIMITER, randomString() + DELIMITER}; Integer businessObjectDataVersion = 0; // Set up database setupDatabase(namespace, dataProviderName, businessObjectDefinitionName, fileTypeCode, businessObjectFormatUsage, businessObjectFormatVersion, 
businessObjectDataVersion, partitionKey, partitionValues); String subPartitionValues = buildDelimitedSubPartitionValues(partitionValues); String partitionValue = partitionValues[0]; Map<String, Object> variables = executeJob(namespace, businessObjectDefinitionName, businessObjectFormatUsage, fileTypeCode, partitionKey, partitionValue, subPartitionValues, null, null); // Validate status String serviceTaskStatus = (String) variables.get(getServiceTaskVariableName(ActivitiRuntimeHelper.VARIABLE_STATUS)); assertEquals(ActivitiRuntimeHelper.TASK_STATUS_SUCCESS, serviceTaskStatus); // Validate JSON response String jsonResponse = (String) variables.get(getServiceTaskVariableName(VARIABLE_JSON_RESPONSE)); ObjectMapper om = new ObjectMapper(); BusinessObjectData businessObjectData = om.readValue(jsonResponse.getBytes(), BusinessObjectData.class); assertEquals(namespace, businessObjectData.getNamespace()); assertEquals(businessObjectDefinitionName, businessObjectData.getBusinessObjectDefinitionName()); assertEquals(businessObjectFormatUsage, businessObjectData.getBusinessObjectFormatUsage()); assertEquals(fileTypeCode, businessObjectData.getBusinessObjectFormatFileType()); assertEquals(businessObjectFormatVersion.intValue(), businessObjectData.getBusinessObjectFormatVersion()); assertEquals(partitionKey, businessObjectData.getPartitionKey()); assertEquals(partitionValue, businessObjectData.getPartitionValue()); assertNotNull(businessObjectData.getSubPartitionValues()); assertEquals(4, businessObjectData.getSubPartitionValues().size()); assertEquals(partitionValues[1], businessObjectData.getSubPartitionValues().get(0)); assertEquals(partitionValues[2], businessObjectData.getSubPartitionValues().get(1)); assertEquals(partitionValues[3], businessObjectData.getSubPartitionValues().get(2)); assertEquals(partitionValues[4], businessObjectData.getSubPartitionValues().get(3)); assertEquals(businessObjectDataVersion.intValue(), businessObjectData.getVersion()); assertEquals(true, 
businessObjectData.isLatestVersion()); assertEquals(1, businessObjectData.getStorageUnits().size()); assertEquals(0, businessObjectData.getAttributes().size()); assertEquals(0, businessObjectData.getBusinessObjectDataParents().size()); assertEquals(0, businessObjectData.getBusinessObjectDataChildren().size()); } @Test public void test_NoSubPartitions_WithFormatVersion_WithDataVersion() throws Exception { // Information that will be used for both set up data and input data String dataProviderName = randomString(); String namespace = randomString(); String businessObjectDefinitionName = randomString(); String fileTypeCode = randomString(); Integer businessObjectFormatVersion = 0; String partitionKey = randomString(); String businessObjectFormatUsage = randomString(); String[] partitionValues = {randomString(), null, null, null, null}; Integer businessObjectDataVersion = 0; // Set up database setupDatabase(namespace, dataProviderName, businessObjectDefinitionName, fileTypeCode, businessObjectFormatUsage, businessObjectFormatVersion, businessObjectDataVersion, partitionKey, partitionValues); Map<String, Object> variables = executeJob(namespace, businessObjectDefinitionName, businessObjectFormatUsage, fileTypeCode, partitionKey, partitionValues[0], null, businessObjectFormatVersion.toString(), businessObjectDataVersion.toString()); // Validate status String serviceTaskStatus = (String) variables.get(getServiceTaskVariableName(ActivitiRuntimeHelper.VARIABLE_STATUS)); assertEquals(ActivitiRuntimeHelper.TASK_STATUS_SUCCESS, serviceTaskStatus); // Validate JSON response String jsonResponse = (String) variables.get(getServiceTaskVariableName(VARIABLE_JSON_RESPONSE)); ObjectMapper om = new ObjectMapper(); BusinessObjectData businessObjectData = om.readValue(jsonResponse.getBytes(), BusinessObjectData.class); assertEquals(namespace, businessObjectData.getNamespace()); assertEquals(businessObjectDefinitionName, businessObjectData.getBusinessObjectDefinitionName()); 
assertEquals(businessObjectFormatUsage, businessObjectData.getBusinessObjectFormatUsage()); assertEquals(fileTypeCode, businessObjectData.getBusinessObjectFormatFileType()); assertEquals(businessObjectFormatVersion.intValue(), businessObjectData.getBusinessObjectFormatVersion()); assertEquals(partitionKey, businessObjectData.getPartitionKey()); assertEquals(partitionValues[0], businessObjectData.getPartitionValue()); assertEquals(0, businessObjectData.getSubPartitionValues().size()); assertEquals(businessObjectDataVersion.intValue(), businessObjectData.getVersion()); assertEquals(true, businessObjectData.isLatestVersion()); assertEquals(1, businessObjectData.getStorageUnits().size()); assertEquals(0, businessObjectData.getAttributes().size()); assertEquals(0, businessObjectData.getBusinessObjectDataParents().size()); assertEquals(0, businessObjectData.getBusinessObjectDataChildren().size()); } @Test public void test_NoSubPartitions_WithFormatVersion_WithDataVersion_InvalidDataVersion() throws Exception { // Information that will be used for both set up data and input data String dataProviderName = randomString(); String namespace = randomString(); String businessObjectDefinitionName = randomString(); String fileTypeCode = randomString(); Integer businessObjectFormatVersion = 0; String partitionKey = randomString(); String businessObjectFormatUsage = randomString(); String[] partitionValues = {randomString(), null, null, null, null}; Integer businessObjectDataVersion = 0; // Set up database setupDatabase(namespace, dataProviderName, businessObjectDefinitionName, fileTypeCode, businessObjectFormatUsage, businessObjectFormatVersion, businessObjectDataVersion, partitionKey, partitionValues); Map<String, Object> variables = executeJob(namespace, businessObjectDefinitionName, businessObjectFormatUsage, fileTypeCode, partitionKey, partitionValues[0], null, businessObjectFormatVersion.toString(), "INVALID"); // Validate status String serviceTaskStatus = (String) 
variables.get(getServiceTaskVariableName(ActivitiRuntimeHelper.VARIABLE_STATUS)); assertEquals(ActivitiRuntimeHelper.TASK_STATUS_ERROR, serviceTaskStatus); } @Test public void test_NoSubPartitions_WithFormatVersion_WithDataVersion_InvalidFormatVersion() throws Exception { // Information that will be used for both set up data and input data String dataProviderName = randomString(); String namespace = randomString(); String businessObjectDefinitionName = randomString(); String fileTypeCode = randomString(); Integer businessObjectFormatVersion = 0; String partitionKey = randomString(); String businessObjectFormatUsage = randomString(); String[] partitionValues = {randomString(), null, null, null, null}; Integer businessObjectDataVersion = 0; // Set up database setupDatabase(namespace, dataProviderName, businessObjectDefinitionName, fileTypeCode, businessObjectFormatUsage, businessObjectFormatVersion, businessObjectDataVersion, partitionKey, partitionValues); Map<String, Object> variables = executeJob(namespace, businessObjectDefinitionName, businessObjectFormatUsage, fileTypeCode, partitionKey, partitionValues[0], null, "INVALID", businessObjectDataVersion.toString()); // Validate status String serviceTaskStatus = (String) variables.get(getServiceTaskVariableName(ActivitiRuntimeHelper.VARIABLE_STATUS)); assertEquals(ActivitiRuntimeHelper.TASK_STATUS_ERROR, serviceTaskStatus); } @Test public void test_NoSubPartitions_NoFormatVersion_NoDataVersion() throws Exception { // Information that will be used for both set up data and input data String dataProviderName = randomString(); String namespace = randomString(); String businessObjectDefinitionName = randomString(); String fileTypeCode = randomString(); Integer businessObjectFormatVersion = 0; String partitionKey = randomString(); String businessObjectFormatUsage = randomString(); String[] partitionValues = {randomString(), null, null, null, null}; Integer businessObjectDataVersion = 0; // Set up database 
setupDatabase(namespace, dataProviderName, businessObjectDefinitionName, fileTypeCode, businessObjectFormatUsage, businessObjectFormatVersion, businessObjectDataVersion, partitionKey, partitionValues); Map<String, Object> variables = executeJob(namespace, businessObjectDefinitionName, businessObjectFormatUsage, fileTypeCode, partitionKey, partitionValues[0], null, null, null); // Validate status String serviceTaskStatus = (String) variables.get(getServiceTaskVariableName(ActivitiRuntimeHelper.VARIABLE_STATUS)); assertEquals(ActivitiRuntimeHelper.TASK_STATUS_SUCCESS, serviceTaskStatus); // Validate JSON response String jsonResponse = (String) variables.get(getServiceTaskVariableName(VARIABLE_JSON_RESPONSE)); ObjectMapper om = new ObjectMapper(); BusinessObjectData businessObjectData = om.readValue(jsonResponse.getBytes(), BusinessObjectData.class); assertEquals(namespace, businessObjectData.getNamespace()); assertEquals(businessObjectDefinitionName, businessObjectData.getBusinessObjectDefinitionName()); assertEquals(businessObjectFormatUsage, businessObjectData.getBusinessObjectFormatUsage()); assertEquals(fileTypeCode, businessObjectData.getBusinessObjectFormatFileType()); assertEquals(businessObjectFormatVersion.intValue(), businessObjectData.getBusinessObjectFormatVersion()); assertEquals(partitionKey, businessObjectData.getPartitionKey()); assertEquals(partitionValues[0], businessObjectData.getPartitionValue()); assertEquals(0, businessObjectData.getSubPartitionValues().size()); assertEquals(businessObjectDataVersion.intValue(), businessObjectData.getVersion()); assertEquals(true, businessObjectData.isLatestVersion()); assertEquals(1, businessObjectData.getStorageUnits().size()); assertEquals(0, businessObjectData.getAttributes().size()); assertEquals(0, businessObjectData.getBusinessObjectDataParents().size()); assertEquals(0, businessObjectData.getBusinessObjectDataChildren().size()); } @Test public void 
test_NoSubPartitions_NoFormatVersion_NoDataVersion_InvalidPartitionKey() throws Exception { // Information that will be used for both set up data and input data String dataProviderName = randomString(); String namespace = randomString(); String businessObjectDefinitionName = randomString(); String fileTypeCode = randomString(); Integer businessObjectFormatVersion = 0; String partitionKey = randomString(); String businessObjectFormatUsage = randomString(); String[] partitionValues = {randomString(), null, null, null, null}; Integer businessObjectDataVersion = 0; // Set up database setupDatabase(namespace, dataProviderName, businessObjectDefinitionName, fileTypeCode, businessObjectFormatUsage, businessObjectFormatVersion, businessObjectDataVersion, "INVALID", partitionValues); Map<String, Object> variables = executeJob(namespace, businessObjectDefinitionName, businessObjectFormatUsage, fileTypeCode, partitionKey, partitionValues[0], null, null, null); // Validate status String serviceTaskStatus = (String) variables.get(getServiceTaskVariableName(ActivitiRuntimeHelper.VARIABLE_STATUS)); assertEquals(ActivitiRuntimeHelper.TASK_STATUS_ERROR, serviceTaskStatus); } @Test public void test_NoSubPartitions_NoFormatVersion_NoDataVersion_NoPartitionKey() throws Exception { // Information that will be used for both set up data and input data String dataProviderName = randomString(); String namespace = randomString(); String businessObjectDefinitionName = randomString(); String fileTypeCode = randomString(); Integer businessObjectFormatVersion = 0; String partitionKey = randomString(); String businessObjectFormatUsage = randomString(); String[] partitionValues = {randomString(), null, null, null, null}; Integer businessObjectDataVersion = 0; // Set up database setupDatabase(namespace, dataProviderName, businessObjectDefinitionName, fileTypeCode, businessObjectFormatUsage, businessObjectFormatVersion, businessObjectDataVersion, partitionKey, partitionValues); Map<String, Object> 
variables = executeJob(namespace, businessObjectDefinitionName, businessObjectFormatUsage, fileTypeCode, null, partitionValues[0], null, null, null); // Validate status String serviceTaskStatus = (String) variables.get(getServiceTaskVariableName(ActivitiRuntimeHelper.VARIABLE_STATUS)); assertEquals(ActivitiRuntimeHelper.TASK_STATUS_SUCCESS, serviceTaskStatus); } @Test public void test_NoSubPartitions_NoFormatVersion_NoDataVersion_NoFileType() throws Exception { // Information that will be used for both set up data and input data String dataProviderName = randomString(); String namespace = randomString(); String businessObjectDefinitionName = randomString(); String fileTypeCode = randomString(); Integer businessObjectFormatVersion = 0; String partitionKey = randomString(); String businessObjectFormatUsage = randomString(); String[] partitionValues = {randomString(), null, null, null, null}; Integer businessObjectDataVersion = 0; // Set up database setupDatabase(namespace, dataProviderName, businessObjectDefinitionName, fileTypeCode, businessObjectFormatUsage, businessObjectFormatVersion, businessObjectDataVersion, partitionKey, partitionValues); Map<String, Object> variables = executeJob(namespace, businessObjectDefinitionName, businessObjectFormatUsage, null, partitionKey, partitionValues[0], null, null, null); // Validate status String serviceTaskStatus = (String) variables.get(getServiceTaskVariableName(ActivitiRuntimeHelper.VARIABLE_STATUS)); assertEquals(ActivitiRuntimeHelper.TASK_STATUS_ERROR, serviceTaskStatus); } @Test public void test_NoSubPartitions_NoFormatVersion_NoDataVersion_NoFormatUsage() throws Exception { // Information that will be used for both set up data and input data String dataProviderName = randomString(); String namespace = randomString(); String businessObjectDefinitionName = randomString(); String fileTypeCode = randomString(); Integer businessObjectFormatVersion = 0; String partitionKey = randomString(); String businessObjectFormatUsage 
= randomString(); String[] partitionValues = {randomString(), null, null, null, null}; Integer businessObjectDataVersion = 0; // Set up database setupDatabase(namespace, dataProviderName, businessObjectDefinitionName, fileTypeCode, businessObjectFormatUsage, businessObjectFormatVersion, businessObjectDataVersion, partitionKey, partitionValues); Map<String, Object> variables = executeJob(namespace, businessObjectDefinitionName, null, fileTypeCode, partitionKey, partitionValues[0], null, null, null); // Validate status String serviceTaskStatus = (String) variables.get(getServiceTaskVariableName(ActivitiRuntimeHelper.VARIABLE_STATUS)); assertEquals(ActivitiRuntimeHelper.TASK_STATUS_ERROR, serviceTaskStatus); } @Test public void test_NoSubPartitions_NoFormatVersion_NoDataVersion_NoDefinitionName() throws Exception { // Information that will be used for both set up data and input data String dataProviderName = randomString(); String namespace = randomString(); String businessObjectDefinitionName = randomString(); String fileTypeCode = randomString(); Integer businessObjectFormatVersion = 0; String partitionKey = randomString(); String businessObjectFormatUsage = randomString(); String[] partitionValues = {randomString(), null, null, null, null}; Integer businessObjectDataVersion = 0; // Set up database setupDatabase(namespace, dataProviderName, businessObjectDefinitionName, fileTypeCode, businessObjectFormatUsage, businessObjectFormatVersion, businessObjectDataVersion, partitionKey, partitionValues); Map<String, Object> variables = executeJob(namespace, null, businessObjectFormatUsage, fileTypeCode, partitionKey, partitionValues[0], null, null, null); // Validate status String serviceTaskStatus = (String) variables.get(getServiceTaskVariableName(ActivitiRuntimeHelper.VARIABLE_STATUS)); assertEquals(ActivitiRuntimeHelper.TASK_STATUS_ERROR, serviceTaskStatus); } @Test public void test_NoSubPartitions_NoFormatVersion_NoDataVersion_InvalidPartitionValue() throws Exception { 
// Information that will be used for both set up data and input data String dataProviderName = randomString(); String namespace = randomString(); String businessObjectDefinitionName = randomString(); String fileTypeCode = randomString(); Integer businessObjectFormatVersion = 0; String partitionKey = randomString(); String businessObjectFormatUsage = randomString(); String[] partitionValues = {randomString(), null, null, null, null}; Integer businessObjectDataVersion = 0; // Set up database setupDatabase(namespace, dataProviderName, businessObjectDefinitionName, fileTypeCode, businessObjectFormatUsage, businessObjectFormatVersion, businessObjectDataVersion, partitionKey, partitionValues); Map<String, Object> variables = executeJob(namespace, businessObjectDefinitionName, businessObjectFormatUsage, fileTypeCode, partitionKey, "INVALID", null, null, null); // Validate status String serviceTaskStatus = (String) variables.get(getServiceTaskVariableName(ActivitiRuntimeHelper.VARIABLE_STATUS)); assertEquals(ActivitiRuntimeHelper.TASK_STATUS_ERROR, serviceTaskStatus); } /** * Constructs a random string of length 10. * * @return a random string */ private String randomString() { return ("test" + Math.random()).substring(0, 10); } /** * Inserts a business object data and their FK relationships into the database. * * @param dataProviderName the data provider name. * @param businessObjectDefinitionName the business object definition name. * @param fileTypeCode the file type code. * @param businessObjectFormatUsage the business object format usage. * @param businessObjectFormatVersion the business object format version. * @param businessObjectDataVersion the business object data version. * @param partitionKey the partition key. * @param partitionValues the partition values. 
*/
    private void setupDatabase(String namespace, String dataProviderName, String businessObjectDefinitionName, String fileTypeCode,
        String businessObjectFormatUsage, Integer businessObjectFormatVersion, Integer businessObjectDataVersion, String partitionKey, String[] partitionValues)
    {
        // Build the full FK chain required by a business object data row:
        // data provider -> namespace -> definition -> file type -> format -> storage -> data + storage unit + file.
        DataProviderEntity dataProviderEntity = new DataProviderEntity();
        dataProviderEntity.setName(dataProviderName);
        herdDao.saveAndRefresh(dataProviderEntity);

        NamespaceEntity namespaceEntity = new NamespaceEntity();
        namespaceEntity.setCode(namespace);
        herdDao.saveAndRefresh(namespaceEntity);

        BusinessObjectDefinitionEntity businessObjectDefinition = new BusinessObjectDefinitionEntity();
        businessObjectDefinition.setDataProvider(dataProviderEntity);
        businessObjectDefinition.setName(businessObjectDefinitionName);
        businessObjectDefinition.setNamespace(namespaceEntity);
        herdDao.saveAndRefresh(businessObjectDefinition);

        FileTypeEntity fileTypeEntity = new FileTypeEntity();
        fileTypeEntity.setCode(fileTypeCode);
        herdDao.saveAndRefresh(fileTypeEntity);

        BusinessObjectFormatEntity businessObjectFormatEntity = new BusinessObjectFormatEntity();
        businessObjectFormatEntity.setBusinessObjectDefinition(businessObjectDefinition);
        businessObjectFormatEntity.setBusinessObjectFormatVersion(businessObjectFormatVersion);
        businessObjectFormatEntity.setFileType(fileTypeEntity);
        // Mark the format as latest so version-less lookups in the tests resolve to it.
        businessObjectFormatEntity.setLatestVersion(true);
        businessObjectFormatEntity.setNullValue("#");
        businessObjectFormatEntity.setPartitionKey(partitionKey);
        businessObjectFormatEntity.setUsage(businessObjectFormatUsage);
        herdDao.saveAndRefresh(businessObjectFormatEntity);

        // Storage platform/storage names are random — the tests never look them up by name.
        StoragePlatformEntity storagePlatformEntity = new StoragePlatformEntity();
        storagePlatformEntity.setName(randomString());
        herdDao.saveAndRefresh(storagePlatformEntity);

        StorageEntity storageEntity = new StorageEntity();
        storageEntity.setName(randomString());
        storageEntity.setStoragePlatform(storagePlatformEntity);
        herdDao.saveAndRefresh(storageEntity);

        BusinessObjectDataStatusEntity businessObjectDataStatusEntity = new BusinessObjectDataStatusEntity();
        businessObjectDataStatusEntity.setCode(BusinessObjectDataStatusEntity.VALID);

        BusinessObjectDataEntity businessObjectDataEntity = new BusinessObjectDataEntity();
        businessObjectDataEntity.setBusinessObjectFormat(businessObjectFormatEntity);
        businessObjectDataEntity.setLatestVersion(true);
        // partitionValues[0] is the primary partition value; [1]..[4] are the optional sub-partitions (may be null).
        businessObjectDataEntity.setPartitionValue(partitionValues[0]);
        businessObjectDataEntity.setPartitionValue2(partitionValues[1]);
        businessObjectDataEntity.setPartitionValue3(partitionValues[2]);
        businessObjectDataEntity.setPartitionValue4(partitionValues[3]);
        businessObjectDataEntity.setPartitionValue5(partitionValues[4]);
        businessObjectDataEntity.setStatus(businessObjectDataStatusEntity);

        Collection<StorageUnitEntity> storageUnits = new ArrayList<>();
        StorageUnitEntity storageUnitEntity = new StorageUnitEntity();
        storageUnitEntity.setStorage(storageEntity);

        Collection<StorageFileEntity> storageFiles = new ArrayList<>();
        StorageFileEntity storageFileEntity = new StorageFileEntity();
        storageFileEntity.setPath(randomString());
        storageFileEntity.setFileSizeBytes(1000l);
        storageFileEntity.setStorageUnit(storageUnitEntity);
        storageFiles.add(storageFileEntity);
        storageUnitEntity.setStorageFiles(storageFiles);

        storageUnitEntity.setBusinessObjectData(businessObjectDataEntity);
        storageUnitEntity.setStatus(herdDao.getStorageUnitStatusByCode(StorageUnitStatusEntity.ENABLED));
        storageUnits.add(storageUnitEntity);
        businessObjectDataEntity.setStorageUnits(storageUnits);
        businessObjectDataEntity.setVersion(businessObjectDataVersion);
        herdDao.saveAndRefresh(businessObjectDataEntity);
    }

    /**
     * Executes the Activiti job with the given parameters and returns variables. The parameters are as defined in the documentation. Any null parameter is
     * simply omitted from the workflow input.
     *
     * @param namespace the namespace (optional; when null, neither the field extension nor the parameter is added).
     * @param businessObjectDefinitionName the business object definition name.
     * @param businessObjectFormatUsage the business object format usage.
     * @param fileTypeCode the file type code.
     * @param partitionKey the partition key.
     * @param partitionValue the partition value.
     * @param subPartitionValues the sub-partition values.
     * @param businessObjectFormatVersion the business object format version (optional).
     * @param businessObjectDataVersion the business object data version (optional).
     *
     * @return map of variable name to variable value
     * @throws Exception
     */
    private Map<String, Object> executeJob(String namespace, String businessObjectDefinitionName, String businessObjectFormatUsage, String fileTypeCode,
        String partitionKey, String partitionValue, String subPartitionValues, String businessObjectFormatVersion, String businessObjectDataVersion)
        throws Exception
    {
        // Prepare input data
        // Field extensions map Activiti expression placeholders to the service task's fields.
        List<FieldExtension> fieldExtensionList = new ArrayList<>();
        if (namespace != null)
        {
            fieldExtensionList.add(buildFieldExtension("namespace", "${businessObjectDefinitionNamespace}"));
        }
        fieldExtensionList.add(buildFieldExtension("businessObjectDefinitionName", "${businessObjectDefinitionName}"));
        fieldExtensionList.add(buildFieldExtension("businessObjectFormatUsage", "${businessObjectFormatUsage}"));
        fieldExtensionList.add(buildFieldExtension("businessObjectFormatFileType", "${businessObjectFormatFileType}"));
        fieldExtensionList.add(buildFieldExtension("partitionKey", "${partitionKey}"));
        fieldExtensionList.add(buildFieldExtension("partitionValue", "${partitionValue}"));
        fieldExtensionList.add(buildFieldExtension("subPartitionValues", "${subPartitionValues}"));
        fieldExtensionList.add(buildFieldExtension("businessObjectFormatVersion", "${businessObjectFormatVersion}"));
        fieldExtensionList.add(buildFieldExtension("businessObjectDataVersion", "${businessObjectDataVersion}"));

        // Parameters supply the concrete values the placeholders above resolve to.
        List<Parameter> parameters = new ArrayList<>();
        if (namespace != null)
        {
            parameters.add(buildParameter("businessObjectDefinitionNamespace", namespace));
        }
        parameters.add(buildParameter("businessObjectDefinitionName", businessObjectDefinitionName));
        parameters.add(buildParameter("businessObjectFormatUsage", businessObjectFormatUsage));
        parameters.add(buildParameter("businessObjectFormatFileType", fileTypeCode));
        parameters.add(buildParameter("partitionKey", partitionKey));
        parameters.add(buildParameter("partitionValue", partitionValue));
        parameters.add(buildParameter("subPartitionValues", subPartitionValues));
        parameters.add(buildParameter("businessObjectFormatVersion", businessObjectFormatVersion));
        parameters.add(buildParameter("businessObjectDataVersion", businessObjectDataVersion));

        String activitiXml = buildActivitiXml(IMPLEMENTATION, fieldExtensionList);

        // Execute job
        Job job = createJobForCreateClusterForActivitiXml(activitiXml, parameters);
        assertNotNull(job);

        // Pull the finished process instance from history so we can inspect the variables it produced.
        HistoricProcessInstance hisInstance =
            activitiHistoryService.createHistoricProcessInstanceQuery().processInstanceId(job.getId()).includeProcessVariables().singleResult();
        return hisInstance.getProcessVariables();
    }

    /**
     * Constructs a delimited sub-partition values. If a delimiter character is present in the value, the delimiter is escaped. The given partition values
     * includes both the primary partition value and the sub-partition values, that is, the given array should look like {@code [primary, subpv1, subpv2,
     * subpv3, subpv4]}.
     *
     * @param partitionValues the partition values.
     *
     * @return delimited sub-partition values
     * @throws IllegalArgumentException when given partition values' size is 1 or less.
     */
    private String buildDelimitedSubPartitionValues(String[] partitionValues)
    {
        if (partitionValues.length <= 1)
        {
            throw new IllegalArgumentException("partitionValues.length must be greater than 1");
        }

        StringBuilder subPartitionValuesBuilder = new StringBuilder();
        // Index 0 is the primary partition value; only indexes 1..n-1 are joined here.
        for (int i = 1; i < partitionValues.length; i++)
        {
            if (i > 1)
            {
                subPartitionValuesBuilder.append(DELIMITER);
            }
            String partitionValue = partitionValues[i];
            // Escape any literal delimiter inside the value so the joined string stays parseable.
            // NOTE(review): throws NullPointerException if a sub-partition slot is null — confirm callers only pass non-null slots.
            partitionValue = partitionValue.replaceAll(PATTERN_DELIMITER, DELIMITER_ESCAPE);
            subPartitionValuesBuilder.append(partitionValue);
        }
        return subPartitionValuesBuilder.toString();
    }
}
apache-2.0
adrianromero/data
src/main/java/com/adr/data/DataException.java
1120
// Data Access is a Java library to store data // Copyright (C) 2017-2019 Adrián Romero Corchado. // // This file is part of Data Access // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.adr.data; public class DataException extends Exception { private static final long serialVersionUID = 8719551478674716662L; public DataException(String msg) { super(msg); } public DataException(Throwable t) { super(t); } public DataException(String msg, Throwable t) { super(msg, t); } }
apache-2.0
mike10004/appengine-imaging
gaecompat-awt-imaging/src/awt/com/google/code/appengine/awt/font/LineMetrics.java
1487
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * @author Ilya S. Okomin */ package com.google.code.appengine.awt.font; public abstract class LineMetrics { public abstract float[] getBaselineOffsets(); public abstract int getNumChars(); public abstract int getBaselineIndex(); public abstract float getUnderlineThickness(); public abstract float getUnderlineOffset(); public abstract float getStrikethroughThickness(); public abstract float getStrikethroughOffset(); public abstract float getLeading(); public abstract float getHeight(); public abstract float getDescent(); public abstract float getAscent(); }
apache-2.0
DataTorrent/Megh
contrib/src/test/java/com/datatorrent/contrib/hdht/HDHTBenchmarkTest.java
8604
/**
 * Copyright (c) 2016 DataTorrent, Inc. ALL Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package com.datatorrent.contrib.hdht;

import java.io.File;
import java.io.IOException;
import java.io.Serializable;
import java.nio.ByteBuffer;
import java.util.Random;
import java.util.TreeMap;

import org.junit.Assert;
import org.junit.Rule;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;

import com.google.common.collect.Lists;

import com.datatorrent.api.Context;
import com.datatorrent.api.Context.OperatorContext;
import com.datatorrent.api.Context.PortContext;
import com.datatorrent.api.DAG;
import com.datatorrent.api.DAG.Locality;
import com.datatorrent.api.DefaultOutputPort;
import com.datatorrent.api.InputOperator;
import com.datatorrent.api.LocalMode;
import com.datatorrent.api.Operator;
import com.datatorrent.api.Stats.OperatorStats;
import com.datatorrent.api.StatsListener;
import com.datatorrent.api.StreamingApplication;
import com.datatorrent.api.annotation.ApplicationAnnotation;
import com.datatorrent.common.util.BaseOperator;
import com.datatorrent.contrib.hdht.hfile.HFileImpl;
import com.datatorrent.lib.fileaccess.FileAccessFSImpl;
import com.datatorrent.lib.util.KeyValPair;
import com.datatorrent.lib.util.TestUtils;
import com.datatorrent.netlet.util.Slice;

/**
 * Benchmark application for HDHT: a rate-adjustable key/value generator
 * feeding an HDHT store operator, with a stats listener that throttles the
 * generator based on how far the store falls behind.
 */
@ApplicationAnnotation(name = "HDHTBenchmarkTest")
public class HDHTBenchmarkTest implements StreamingApplication
{
  @Override
  public void populateDAG(DAG dag, Configuration conf)
  {
    // One shared listener observes both operators so it can compare their window ids.
    TestStatsListener sl = new TestStatsListener();
    sl.adjustRate = conf.getBoolean("dt.hdsbench.adjustRate", false);
    TestGenerator gen = dag.addOperator("Generator", new TestGenerator());
    dag.setAttribute(gen, OperatorContext.STATS_LISTENERS, Lists.newArrayList((StatsListener)sl));
    TestStoreOperator store = dag.addOperator("Store", new TestStoreOperator());
    dag.setAttribute(store, OperatorContext.STATS_LISTENERS, Lists.newArrayList((StatsListener)sl));
    FileAccessFSImpl hfa = new HFileImpl();
    hfa.setBasePath(this.getClass().getSimpleName());
    store.setFileStore(hfa);
    dag.setInputPortAttribute(store.input, PortContext.PARTITION_PARALLEL, true);
    dag.getOperatorMeta("Store").getAttributes().put(Context.OperatorContext.COUNTERS_AGGREGATOR, new HDHTWriter.BucketIOStatAggregator());
    dag.addStream("Events", gen.data, store.input).setLocality(Locality.THREAD_LOCAL);
  }

  /** Emits random 16-byte keys with a fixed 1000-byte value at a configurable rate. */
  public static class TestGenerator extends BaseOperator implements InputOperator
  {
    public final transient DefaultOutputPort<KeyValPair<byte[], byte[]>> data = new DefaultOutputPort<KeyValPair<byte[], byte[]>>();
    int emitBatchSize = 1000;
    byte[] val = ByteBuffer.allocate(1000).putLong(1234).array();
    int rate = 20000;
    int emitCount = 0;
    private final Random random = new Random();
    private int range = 1000 * 60; // one minute range of hot keys

    public int getEmitBatchSize()
    {
      return emitBatchSize;
    }

    public void setEmitBatchSize(int emitBatchSize)
    {
      this.emitBatchSize = emitBatchSize;
    }

    public int getRate()
    {
      return rate;
    }

    public void setRate(int rate)
    {
      this.rate = rate;
    }

    public int getRange()
    {
      return range;
    }

    public void setRange(int range)
    {
      this.range = range;
    }

    @Override
    public void beginWindow(long windowId)
    {
      super.beginWindow(windowId);
      // Reset the per-window counter so "rate" caps the emissions of each window.
      emitCount = 0;
    }

    @Override
    public void emitTuples()
    {
      long timestamp = System.currentTimeMillis();
      for (int i = 0; i < emitBatchSize && emitCount < rate; i++) {
        // Key = (timestamp bucketed to 'range', plus a random offset) followed by the batch index.
        byte[] key = ByteBuffer.allocate(16).putLong((timestamp - timestamp % range) + random.nextInt(range)).putLong(i).array();
        data.emit(new KeyValPair<byte[], byte[]>(key, val));
        emitCount++;
      }
    }
  }

  /** Store operator used by the benchmark; delegates event processing to the base test operator. */
  public static class TestStoreOperator extends HDHTTestOperator
  {
    @Override
    protected void processEvent(KeyValPair<byte[], byte[]> event) throws IOException
    {
      super.processEvent(event);
    }
  }

  /**
   * Observes the generator (upstream) and store (downstream) window ids and, when
   * {@code adjustRate} is enabled, sends rate-change requests to the generator.
   */
  public static class TestStatsListener implements StatsListener, Serializable
  {
    private static final Logger LOG = LoggerFactory.getLogger(TestStatsListener.class);
    private static final long serialVersionUID = 1L;
    SetPropertyRequest cmd = new SetPropertyRequest();

    long uwId;
    long dwId;
    long resumewid;
    int rate;
    int queueSize;
    boolean adjustRate;

    @Override
    public Response processStats(BatchedOperatorStats stats)
    {
      if (!stats.getLastWindowedStats().isEmpty()) {
        OperatorStats os = stats.getLastWindowedStats().get(stats.getLastWindowedStats().size() - 1);
        if (os.inputPorts != null && !os.inputPorts.isEmpty()) {
          // Stats with input ports come from the downstream store operator.
          dwId = os.windowId;
          queueSize = os.inputPorts.get(0).queueSize;
          if (uwId - dwId < 5) {
            // keep operator busy
            rate = Math.max(1000, rate);
            rate += rate / 10;
          } else if (uwId - dwId > 20) {
            // operator is behind
            if (resumewid < dwId) {
              resumewid = uwId - 15;
              rate -= rate / 10;
            }
          }
        } else {
          LOG.debug("uwid-dwid {} skip {} rate {}, queueSize {}", uwId - dwId, resumewid - dwId, rate, queueSize);
          // upstream operator
          uwId = os.windowId;
          if (adjustRate) {
            Response rsp = new Response();
            // Pause (rate 0) until the store catches up past the resume window.
            cmd.rate = resumewid < dwId ? rate : 0;
            rsp.operatorRequests = Lists.newArrayList(cmd);
            return rsp;
          }
        }
      }
      return null;
    }

    /** Operator request that pushes the computed rate into the generator. */
    public static class SetPropertyRequest implements OperatorRequest, Serializable
    {
      private static final long serialVersionUID = 1L;
      int rate;

      @Override
      public OperatorResponse execute(Operator oper, int arg1, long arg2) throws IOException
      {
        if (oper instanceof TestGenerator) {
          LOG.debug("Setting rate to {}", rate);
          ((TestGenerator)oper).rate = rate;
        }
        return null;
      }
    }
  }

  @Rule
  public final TestUtils.TestInfo testInfo = new TestUtils.TestInfo();

  // NOTE(review): runs HDHTAppTest, not this class — confirm that is intentional.
  //@Test
  public void test() throws Exception
  {
    File file = new File(testInfo.getDir());
    FileUtils.deleteDirectory(file);
    LocalMode lma = LocalMode.newInstance();
    Configuration conf = new Configuration(false);
    conf.set("dt.operator.Store.fileStore.basePath", file.toURI().toString());
    //conf.set("dt.operator.Store.flushSize", "0");
    conf.set("dt.operator.Store.flushIntervalCount", "1");
    conf.set("dt.operator.Generator.attr.PARTITIONER", "com.datatorrent.lib.partitioner.StatelessPartitioner:2");
    lma.prepareDAG(new HDHTAppTest(), conf);
    LocalMode.Controller lc = lma.getController();
    lc.setHeartbeatMonitoringEnabled(false);
    lc.runAsync();
    long tms = System.currentTimeMillis();
    File f0 = new File(file, "0/0-0");
    File f1 = new File(file, "1/1-0");
    File wal0 = new File(file, "0/_WAL-0");
    File wal1 = new File(file, "1/_WAL-0");
    // Poll (up to 30 s) until both bucket data files appear.
    while (System.currentTimeMillis() - tms < 30000) {
      if (f0.exists() && f1.exists()) {
        break;
      }
      Thread.sleep(100);
    }
    lc.shutdown();
    Assert.assertTrue("exists " + f0, f0.exists() && f0.isFile());
    Assert.assertTrue("exists " + f1, f1.exists() && f1.isFile());
    // Bug fix: previously asserted wal0.exists() && wal0.exists() (and the same for wal1) —
    // a copy-paste duplication; check isFile() like the data-file assertions above.
    Assert.assertTrue("exists " + wal0, wal0.exists() && wal0.isFile());
    Assert.assertTrue("exists " + wal1, wal1.exists() && wal1.isFile());

    // Verify both buckets are readable and non-empty through the file access layer.
    FileAccessFSImpl fs = new MockFileAccess();
    fs.setBasePath(file.toURI().toString());
    fs.init();
    TreeMap<Slice, Slice> data = new TreeMap<>(new HDHTWriterTest.SequenceComparator());
    fs.getReader(0, "0-0").readFully(data);
    Assert.assertFalse(data.isEmpty());
    data.clear();
    fs.getReader(1, "1-0").readFully(data);
    Assert.assertFalse(data.isEmpty());
    fs.close();
  }
}
apache-2.0
yiwent/Mobike
app/src/main/java/com/yiwen/mobike/map/AllInterface.java
415
package com.yiwen.mobike.map; /** * Created by gaolei on 17/1/6. */ public class AllInterface { public interface OnMenuSlideListener{ void onMenuSlide(float offset); } public interface IUnlock{ void onUnlock(); } public interface IUpdateLocation{ void updateLocation(String totalTime, String totalDistance); void endLocation(); } }
apache-2.0
spring-projects/spring-data-examples
jpa/deferred/src/main/java/example/service/Customer468Service.java
221
package example.service;

import example.repo.Customer468Repository;

import org.springframework.stereotype.Service;

/**
 * Generated example service: exists only to exercise Spring's constructor
 * injection of {@link Customer468Repository}.
 */
@Service
public class Customer468Service {

	// Constructor injection: Spring supplies the repository bean. The reference
	// is deliberately discarded — this example only verifies bean wiring.
	public Customer468Service(Customer468Repository repo) {}
}
apache-2.0
gurhann/AutoTimeTable
optaplanner-webexamples/src/main/java/org/optaplanner/curriculumcourse/edit_view/courses/CoursesViewServlet.java
1979
/*
 * Copyright 2015 JBoss by Red Hat.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.optaplanner.curriculumcourse.edit_view.courses;

import java.io.IOException;
import java.util.List;
import javax.persistence.EntityManager;
import javax.servlet.RequestDispatcher;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.optaplanner.curriculumcourse.dao.CourseDao;
import org.optaplanner.curriculumcourse.dao.CurriculumDao;
import org.optaplanner.curriculumcourse.dao.TeacherDao;
import org.optaplanner.curriculumcourse.service.CourseService;
import org.optaplanner.examples.curriculumcourse.domain.Course;
import org.optaplanner.examples.curriculumcourse.domain.Curriculum;
import org.optaplanner.examples.curriculumcourse.domain.Teacher;

/**
 * Servlet that renders the course list view.
 *
 * @author gurhan
 */
@WebServlet("/curriculumcourse/CoursesViewServlet")
public class CoursesViewServlet extends CoursesServlet{

    // GET: build a fresh CourseService bound to this request/response pair
    // (courseService is inherited from CoursesServlet) and render all courses.
    @Override
    protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
        courseService = new CourseService(req, resp);
        courseService.allCourseList();
    }

    // POST behaves identically to GET for this view.
    @Override
    protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
        doGet(req, resp);
    }
}
apache-2.0
dagnir/aws-sdk-java
aws-java-sdk-kinesis/src/main/java/com/amazonaws/services/kinesisanalytics/model/transform/AddApplicationInputRequestMarshaller.java
2799
/*
 * Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.kinesisanalytics.model.transform;

import javax.annotation.Generated;

import com.amazonaws.SdkClientException;
import com.amazonaws.services.kinesisanalytics.model.*;

import com.amazonaws.protocol.*;
import com.amazonaws.annotation.SdkInternalApi;

/**
 * AddApplicationInputRequestMarshaller
 *
 * Code-generated marshaller: maps the request's fields to their payload
 * locations via the static bindings below. Stateless, hence the singleton.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class AddApplicationInputRequestMarshaller {

    // One binding per request member: payload location + wire name.
    private static final MarshallingInfo<String> APPLICATIONNAME_BINDING = MarshallingInfo.builder(MarshallingType.STRING)
            .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("ApplicationName").build();
    private static final MarshallingInfo<Long> CURRENTAPPLICATIONVERSIONID_BINDING = MarshallingInfo.builder(MarshallingType.LONG)
            .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("CurrentApplicationVersionId").build();
    private static final MarshallingInfo<StructuredPojo> INPUT_BINDING = MarshallingInfo.builder(MarshallingType.STRUCTURED)
            .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("Input").build();

    // Stateless singleton instance.
    private static final AddApplicationInputRequestMarshaller instance = new AddApplicationInputRequestMarshaller();

    public static AddApplicationInputRequestMarshaller getInstance() {
        return instance;
    }

    /**
     * Marshall the given parameter object.
     */
    public void marshall(AddApplicationInputRequest addApplicationInputRequest, ProtocolMarshaller protocolMarshaller) {
        // Guard against a null request before dereferencing it.
        if (addApplicationInputRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }

        try {
            protocolMarshaller.marshall(addApplicationInputRequest.getApplicationName(), APPLICATIONNAME_BINDING);
            protocolMarshaller.marshall(addApplicationInputRequest.getCurrentApplicationVersionId(), CURRENTAPPLICATIONVERSIONID_BINDING);
            protocolMarshaller.marshall(addApplicationInputRequest.getInput(), INPUT_BINDING);
        } catch (Exception e) {
            // Wrap any failure in the SDK's client exception, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
apache-2.0
oehme/analysing-gradle-performance
my-app/src/test/java/org/gradle/test/performance/mediummonolithicjavaproject/p330/Test6603.java
2174
package org.gradle.test.performance.mediummonolithicjavaproject.p330;

import org.junit.Test;

import static org.junit.Assert.*;

/**
 * Generated unit tests for Production6603: each test sets one property and
 * asserts the matching getter returns the same value. JUnit instantiates a
 * fresh Test6603 (and hence a fresh Production6603) per test method, so the
 * tests are independent.
 */
public class Test6603 {

    Production6603 objectUnderTest = new Production6603();

    // Properties 0-2 hold references to other generated Production classes.
    @Test
    public void testProperty0() {
        Production6600 value = new Production6600();
        objectUnderTest.setProperty0(value);
        assertEquals(value, objectUnderTest.getProperty0());
    }

    @Test
    public void testProperty1() {
        Production6601 value = new Production6601();
        objectUnderTest.setProperty1(value);
        assertEquals(value, objectUnderTest.getProperty1());
    }

    @Test
    public void testProperty2() {
        Production6602 value = new Production6602();
        objectUnderTest.setProperty2(value);
        assertEquals(value, objectUnderTest.getProperty2());
    }

    // Properties 3-9 are plain String properties.
    @Test
    public void testProperty3() {
        String value = "value";
        objectUnderTest.setProperty3(value);
        assertEquals(value, objectUnderTest.getProperty3());
    }

    @Test
    public void testProperty4() {
        String value = "value";
        objectUnderTest.setProperty4(value);
        assertEquals(value, objectUnderTest.getProperty4());
    }

    @Test
    public void testProperty5() {
        String value = "value";
        objectUnderTest.setProperty5(value);
        assertEquals(value, objectUnderTest.getProperty5());
    }

    @Test
    public void testProperty6() {
        String value = "value";
        objectUnderTest.setProperty6(value);
        assertEquals(value, objectUnderTest.getProperty6());
    }

    @Test
    public void testProperty7() {
        String value = "value";
        objectUnderTest.setProperty7(value);
        assertEquals(value, objectUnderTest.getProperty7());
    }

    @Test
    public void testProperty8() {
        String value = "value";
        objectUnderTest.setProperty8(value);
        assertEquals(value, objectUnderTest.getProperty8());
    }

    @Test
    public void testProperty9() {
        String value = "value";
        objectUnderTest.setProperty9(value);
        assertEquals(value, objectUnderTest.getProperty9());
    }
}
apache-2.0
kaioz/optus
app/src/main/java/com/cocosw/optus/model/Route.java
311
package com.cocosw.optus.model;

import java.util.HashMap;

/**
 * Serializable data model of a transit route. Fields are intentionally public
 * (bean-less model style used across this package).
 */
public class Route implements java.io.Serializable {

	private static final long serialVersionUID = 2599929198439885616L;

	// Route identifier.
	public int id;
	// Route location (project type).
	public RouteLocation location;
	// Display name of the route.
	public String name;
	// NOTE(review): presumably maps a key (station? direction?) to travel info
	// from the central station — confirm semantics against callers.
	public HashMap<String,String> fromcentral;
}
apache-2.0
YTestGithub/FourGank
app/src/main/java/com/yibh/fourgank/yonionyy/mvp_logintest/model/User.java
288
package com.yibh.fourgank.yonionyy.mvp_logintest.model; /** * Created by yibh on 2016/9/1 17:51. */ public class User { public String mName; public String mPassWord; public User(String name, String passWord) { mName = name; mPassWord = passWord; } }
apache-2.0
morungos/turalt-openmentor
src/test/java/com/turalt/openmentor/test/AbstractShiroTest.java
2347
package com.turalt.openmentor.test;

import org.apache.shiro.SecurityUtils;
import org.apache.shiro.UnavailableSecurityManagerException;
import org.apache.shiro.mgt.SecurityManager;
import org.apache.shiro.subject.Subject;
import org.apache.shiro.subject.support.SubjectThreadState;
import org.apache.shiro.util.LifecycleUtils;
import org.apache.shiro.util.ThreadState;

import org.junit.AfterClass;

/**
 * Abstract test case enabling Shiro in test environments.
 *
 * Binds a {@link Subject} to the current thread via a {@link ThreadState} so
 * code under test can call {@code SecurityUtils.getSubject()}, and guarantees
 * the thread state is cleared after the test class finishes.
 */
public abstract class AbstractShiroTest {

    // The currently bound thread state; static so the @AfterClass hook can clear it.
    private static ThreadState subjectThreadState;

    public AbstractShiroTest() {
    }

    /**
     * Allows subclasses to set the currently executing {@link Subject} instance.
     *
     * @param subject the Subject instance
     */
    protected void setSubject(Subject subject) {
        // Clear any previous binding first so states never stack.
        clearSubject();
        subjectThreadState = createThreadState(subject);
        subjectThreadState.bind();
    }

    /** @return the Subject currently bound to this thread */
    protected Subject getSubject() {
        return SecurityUtils.getSubject();
    }

    /** Factory hook: subclasses may supply a custom ThreadState implementation. */
    protected ThreadState createThreadState(Subject subject) {
        return new SubjectThreadState(subject);
    }

    /**
     * Clears Shiro's thread state, ensuring the thread remains clean for future test execution.
     */
    protected void clearSubject() {
        doClearSubject();
    }

    // Static worker shared by the instance method and the @AfterClass hook.
    private static void doClearSubject() {
        if (subjectThreadState != null) {
            subjectThreadState.clear();
            subjectThreadState = null;
        }
    }

    protected static void setSecurityManager(SecurityManager securityManager) {
        SecurityUtils.setSecurityManager(securityManager);
    }

    protected static SecurityManager getSecurityManager() {
        return SecurityUtils.getSecurityManager();
    }

    // Runs once after the whole test class: unbind the subject, destroy the
    // SecurityManager, and null it out so later classes start clean.
    @AfterClass
    public static void tearDownShiro() {
        doClearSubject();
        try {
            SecurityManager securityManager = getSecurityManager();
            LifecycleUtils.destroy(securityManager);
        } catch (UnavailableSecurityManagerException e) {
            //we don't care about this when cleaning up the test environment
            //(for example, maybe the subclass is a unit test and it didn't
            // need a SecurityManager instance because it was using only
            // mock Subject instances)
        }
        setSecurityManager(null);
    }
}
apache-2.0
xiongyou/ebmis
src/main/java/com/cqu/edu/ebmis/ebmis/parent/App.java
193
package com.cqu.edu.ebmis.ebmis.parent;

/**
 * Minimal bootstrap class: prints a greeting to standard output.
 */
public class App {

    /** Application entry point. */
    public static void main(String[] args) {
        final String greeting = "Hello World!";
        System.out.println(greeting);
    }
}
apache-2.0
zhongying1995/MineWeather
app/src/main/java/com/zhongying/mineweather/utily/Utilies.java
4732
package com.zhongying.mineweather.utily; import android.text.TextUtils; import com.google.gson.Gson; import com.zhongying.mineweather.R; import com.zhongying.mineweather.constant.Constant; import com.zhongying.mineweather.gson.HeWeather; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import java.util.HashMap; import java.util.Map; /** * @class: 工具类 * Created by Administrator on 2017/9/25. */ public class Utilies { private static Map<String,Integer> mIcons; /** * 构造函数,初始化某些数据 */ public Utilies(){ } /** * @function: 解析返回的天气数据 * @param response 返回的reponse的string * @return 返回天气类 HeWeather */ public static HeWeather handleWeatherResponse(String response){ try { JSONObject jsonObject = new JSONObject(response); JSONArray jsonArray = jsonObject.getJSONArray("HeWeather5"); String weatherContent = jsonArray.getJSONObject(0).toString(); return new Gson().fromJson(weatherContent,HeWeather.class); } catch (JSONException e) { e.printStackTrace(); } return null; } /** * @function: 组合成一个合法的url请求 * @param: 对应的weatherId * @return: 正确的url地址 * https://free-api.heweather.com/v5/weather?city=yourcity&key=yourkey */ public static String getWeatherUrl(String weatherId){ return "https://free-api.heweather.com/v5/weather?city=" +weatherId+"&key="+ Constant.URLKEY_FOR_HEWEATHER; } /** * @function: 获取天气图标对应的图标id * @param code 天气图标的 代码 eg: 100 * @return 返回该图标的id */ public static int getWeatherIconId(String code){ if(code==null || TextUtils.isEmpty(code)){ return -1; } if(mIcons == null || mIcons.size()==0){ mIcons = new HashMap<>(); mIcons.clear(); initWeatherIconsId(); } return mIcons.get(code); } private static void initWeatherIconsId(){ mIcons.put("100", R.mipmap.icon100); mIcons.put("101", R.mipmap.icon101); mIcons.put("102", R.mipmap.icon102); mIcons.put("103", R.mipmap.icon103); mIcons.put("104", R.mipmap.icon104); mIcons.put("200", R.mipmap.icon200); mIcons.put("201", R.mipmap.icon201); mIcons.put("202", R.mipmap.icon202); mIcons.put("203", 
R.mipmap.icon203); mIcons.put("204", R.mipmap.icon204); mIcons.put("205", R.mipmap.icon205); mIcons.put("206", R.mipmap.icon206); mIcons.put("207", R.mipmap.icon207); mIcons.put("208", R.mipmap.icon208); mIcons.put("209", R.mipmap.icon209); mIcons.put("210", R.mipmap.icon210); mIcons.put("211", R.mipmap.icon211); mIcons.put("212", R.mipmap.icon212); mIcons.put("213", R.mipmap.icon213); mIcons.put("300", R.mipmap.icon300); mIcons.put("301", R.mipmap.icon301); mIcons.put("302", R.mipmap.icon302); mIcons.put("303", R.mipmap.icon303); mIcons.put("304", R.mipmap.icon304); mIcons.put("305", R.mipmap.icon305); mIcons.put("306", R.mipmap.icon306); mIcons.put("307", R.mipmap.icon307); mIcons.put("308", R.mipmap.icon308); mIcons.put("309", R.mipmap.icon309); mIcons.put("310", R.mipmap.icon310); mIcons.put("311", R.mipmap.icon311); mIcons.put("312", R.mipmap.icon312); mIcons.put("313", R.mipmap.icon313); mIcons.put("400", R.mipmap.icon400); mIcons.put("401", R.mipmap.icon401); mIcons.put("402", R.mipmap.icon402); mIcons.put("403", R.mipmap.icon403); mIcons.put("404", R.mipmap.icon404); mIcons.put("405", R.mipmap.icon405); mIcons.put("406", R.mipmap.icon406); mIcons.put("407", R.mipmap.icon407); mIcons.put("500", R.mipmap.icon500); mIcons.put("501", R.mipmap.icon501); mIcons.put("502", R.mipmap.icon502); mIcons.put("503", R.mipmap.icon503); mIcons.put("504", R.mipmap.icon504); mIcons.put("507", R.mipmap.icon507); mIcons.put("508", R.mipmap.icon508); mIcons.put("900", R.mipmap.icon900); mIcons.put("901", R.mipmap.icon901); mIcons.put("999", R.mipmap.icon999); } public static boolean isWeatherResponseAvailable(HeWeather weather){ return weather!=null && "ok".equals(weather.status); } public static float getSystemCurrentTime(){ return System.currentTimeMillis(); } }
apache-2.0
distribuitech/datos
datos-vfs/src/main/java/com/datos/vfs/provider/VfsComponent.java
1766
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.datos.vfs.provider;

import com.datos.vfs.FileSystem;
import org.apache.commons.logging.Log;
import com.datos.vfs.FileSystemException;

/**
 * This interface is used to manage the lifecycle of all VFS components.
 * This includes all implementations of the following interfaces:
 * <ul>
 * <li>{@link FileProvider}
 * <li>{@link FileSystem}
 * <li>{@link FileReplicator}
 * <li>{@link TemporaryFileStore}
 * </ul>
 *
 * Expected lifecycle order (from the method set below): logger and context are
 * injected first, then {@link #init()}, and finally {@link #close()}.
 */
public interface VfsComponent {
    /**
     * Sets the Logger to use for the component.
     *
     * @param logger The Log
     */
    void setLogger(Log logger);

    /**
     * Sets the context for the component.
     *
     * @param context The context.
     */
    void setContext(VfsComponentContext context);

    /**
     * Initializes the component.
     *
     * @throws FileSystemException if an error occurs.
     */
    void init() throws FileSystemException;

    /**
     * Closes the component.
     */
    void close();
}
apache-2.0
plannifico/Plannifico
src/main/java/org/plannifico/server/response/StatusResponse.java
841
/*Copyright 2014 Rosario Alfano

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.*/
package org.plannifico.server.response;

/**
 * Response payload exposing the engine status as a string.
 *
 * <p>Immutable: the status is fixed at construction time.
 */
public class StatusResponse implements ResponseContent {

    // Status code rendered as a string; final because it never changes after construction.
    private final String engineStatus;

    /**
     * Creates a response wrapping the given numeric engine status.
     *
     * @param status the engine status code
     */
    public StatusResponse(int status) {
        // Integer.toString is the idiomatic conversion (was: "" + status).
        this.engineStatus = Integer.toString(status);
    }

    /**
     * @return the engine status as its decimal string representation
     */
    public String getStatus() {
        return this.engineStatus;
    }
}
apache-2.0
qingchengfit/QcWidgets
widgets/src/main/java/cn/qingchengfit/widgets/PasswordView.java
6170
package cn.qingchengfit.widgets;

import android.content.Context;
import android.content.res.TypedArray;
import android.text.InputType;
import android.util.AttributeSet;
import android.view.View;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
import cn.qingchengfit.utils.ToastUtils;

/**
 * Compound input widget with two modes: password entry (with a show/hide "eye"
 * toggle) and SMS verification-code entry (with a "get code" button).
 * The mode is chosen via XML attributes or flipped at runtime with {@link #toggle()}.
 */
public class PasswordView extends LinearLayout implements View.OnClickListener {
    // true = password mode, false = verification-code mode.
    private boolean isPwMode = true;
    // The actual text input field.
    private EditText mNum;
    // "Get code" button, only visible in code mode.
    private TextView mBtnGetCode;
    // Optional leading icon (padlock / code icon).
    private ImageView mImgLeft;
    // Label shown next to the input ("password" / "code").
    private TextView mTvPw;
    // Show/hide-password eye toggle, only visible in password mode.
    private AnimatedButton mOpenEye;
    // When true, clicks on the "get code" button are suppressed.
    private boolean mBlock = false;
    // External listener notified when "get code" is pressed.
    private OnClickListener mOnClickListener;
    private boolean isShowLeft = true;
    // When true, a "required" marker view is shown.
    private boolean noNull = false;
    private TextView noNullTv;

    public PasswordView(Context context) {
        super(context);
        setOrientation(LinearLayout.HORIZONTAL);
        inflate(context, R.layout.layout_password, this);
        init(null, 0);
        // Called explicitly because the layout was inflated manually above.
        onFinishInflate();
    }

    public PasswordView(Context context, AttributeSet attrs) {
        super(context, attrs);
        setOrientation(LinearLayout.HORIZONTAL);
        inflate(context, R.layout.layout_password, this);
        init(attrs, 0);
        onFinishInflate();
    }

    public PasswordView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        setOrientation(LinearLayout.HORIZONTAL);
        inflate(context, R.layout.layout_password, this);
        init(attrs, defStyle);
        onFinishInflate();
    }

    // Reads the styleable attributes that configure mode and optional views.
    private void init(AttributeSet attrs, int defStyle) {
        // LayoutInflater.from(context).inflate(R.layout.layout_login_edittext, this, true);
        // Load attributes
        final TypedArray a = getContext().obtainStyledAttributes(
                attrs, R.styleable.PasswordView, defStyle, 0);
        isPwMode = a.getBoolean(R.styleable.PasswordView_pw_mode, true);
        isShowLeft = a.getBoolean(R.styleable.PasswordView_pw_show_left, true);
        noNull = a.getBoolean(R.styleable.PasswordView_pw_nonull,false);
        a.recycle();
        if (isInEditMode()) {
            return;
        }
    }

    /**
     * Registers the listener invoked when the "get code" button is tapped.
     * NOTE: overrides the View method but stores the listener for internal
     * dispatch instead of delegating to super.
     */
    @Override
    public void setOnClickListener(OnClickListener onClickListener) {
        mOnClickListener = onClickListener;
    }

    /**
     * Validates the current input length (minimum 6 characters in both modes),
     * showing a mode-specific toast on failure.
     *
     * @return true when the input passes validation
     */
    public boolean checkValid() {
        String text = mNum.getText().toString().trim();
        if (isPwMode) {
            if (text.length() < 6) {
                ToastUtils.show(getResources().getString(R.string.err_password_length));
                return false;
            }
        } else {
            if (text.length() < 6) {
                ToastUtils.show(getResources().getString(R.string.err_checkcode_length));
                return false;
            }
        }
        return true;
    }

    /** @return the trimmed text currently entered. */
    public String getCode() {
        return mNum.getText().toString().trim();
    }

    // Binds child views and applies the initial mode-dependent visibility/state.
    @Override
    protected void onFinishInflate() {
        super.onFinishInflate();
        mImgLeft = (ImageView) findViewById(R.id.img_pw);
        mNum = (EditText) findViewById(R.id.et_pw);
        mBtnGetCode = (TextView) findViewById(R.id.btn_getcode);
        mTvPw = (TextView) findViewById(R.id.tv_password);
        noNullTv = (TextView) findViewById(R.id.nonull_tv);
        mOpenEye = (AnimatedButton) findViewById(R.id.open_eye);
        mBtnGetCode.setOnClickListener(this);
        mOpenEye.setChecked(false);
        mImgLeft.setVisibility(isShowLeft ? VISIBLE : GONE);
        mImgLeft.setImageResource(isPwMode?R.drawable.ic_login_password:R.drawable.ic_login_verifycode);
        noNullTv.setVisibility(noNull?VISIBLE:GONE);
        if (isPwMode) {
            // Password mode: hide "get code", show eye toggle, mask input.
            mBtnGetCode.setVisibility(GONE);
            mTvPw.setText(R.string.login_pw);
            mOpenEye.setVisibility(VISIBLE);
            mNum.setInputType(InputType.TYPE_CLASS_TEXT | InputType.TYPE_TEXT_VARIATION_PASSWORD);
        } else {
            // Code mode: show "get code", hide eye toggle.
            mBtnGetCode.setText(getResources().getString(R.string.login_getcode));
            mTvPw.setText(R.string.phone_code);
            mOpenEye.setVisibility(GONE);
            mBtnGetCode.setVisibility(VISIBLE);
        }
        mOpenEye.setOnClickListener(this);
    }

    /** @return true when the widget is currently in password mode. */
    public boolean isPwMode() {
        return isPwMode;
    }

    /** Replaces the "get code" button label (e.g. for a countdown). */
    public void setRightText(String text) {
        mBtnGetCode.setText(text);
    }

    /** Enables/disables suppression of "get code" clicks. */
    public void blockRightClick(boolean b) {
        mBlock = b;
    }

    /**
     * Flips between password and verification-code mode, clearing the input
     * and reconfiguring visibility, labels and input type accordingly.
     */
    public void toggle() {
        isPwMode = !isPwMode;
        mNum.setText("");
        mImgLeft.setImageResource(isPwMode?R.drawable.ic_login_password:R.drawable.ic_login_verifycode);
        if (isPwMode) {
            mBtnGetCode.setVisibility(GONE);
            mTvPw.setText(R.string.login_pw);
            mOpenEye.setVisibility(VISIBLE);
            // Respect the current eye-toggle state when re-entering password mode.
            mNum.setInputType(InputType.TYPE_CLASS_TEXT
                    | (mOpenEye.isChecked() ? InputType.TYPE_TEXT_VARIATION_VISIBLE_PASSWORD
                                            : InputType.TYPE_TEXT_VARIATION_PASSWORD));
        } else {
            mBtnGetCode.setVisibility(VISIBLE);
            mNum.setInputType(InputType.TYPE_CLASS_TEXT | InputType.TYPE_CLASS_NUMBER);
            mBtnGetCode.setText(getResources().getString(R.string.login_getcode));
            mTvPw.setText(R.string.phone_code);
            mOpenEye.setVisibility(GONE);
            // Re-enable "get code" clicks when switching into code mode.
            mBlock = false;
        }
    }

    // Internal click dispatch for the "get code" button and the eye toggle.
    @Override
    public void onClick(View v) {
        if (v.getId() == R.id.btn_getcode) {
            if (!mBlock) {
                if (mOnClickListener != null) mOnClickListener.onClick(v);
            }
        } else if (v.getId() == R.id.open_eye) {
            mOpenEye.toggle();
            if (mOpenEye.isChecked()) {
                // Eye open: reveal the password.
                mNum.setInputType(InputType.TYPE_CLASS_TEXT | InputType.TYPE_TEXT_VARIATION_VISIBLE_PASSWORD);
            } else {
                // Eye closed: mask the password.
                mNum.setInputType(InputType.TYPE_CLASS_TEXT | InputType.TYPE_TEXT_VARIATION_PASSWORD);
            }
            // Changing the input type resets the cursor; restore it to the end.
            mNum.setSelection(mNum.getText().length());
        }
    }
}
apache-2.0
vovagrechka/fucking-everything
phizdets/phizdets-idea/eclipse-src/org.eclipse.php.debug.core/src/org/eclipse/php/internal/debug/core/xdebug/breakpoints/DBGpExceptionBreakpoint.java
2163
/*******************************************************************************
 * Copyright (c) 2015 Zend Technologies and others.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/epl-v10.html
 *
 * Contributors:
 *   Zend Technologies - initial API and implementation
 *******************************************************************************/
package org.eclipse.php.internal.debug.core.xdebug.breakpoints;

import org.eclipse.core.resources.IFile;
import org.eclipse.debug.core.model.IBreakpoint;
import org.eclipse.php.internal.debug.core.model.IPHPExceptionBreakpoint;
import org.eclipse.php.internal.debug.core.xdebug.dbgp.DBGpBreakpoint;

/**
 * DBGp exception breakpoint.
 *
 * <p>Adapter wrapping an {@link IPHPExceptionBreakpoint} behind the
 * {@link DBGpBreakpoint} interface. Exception breakpoints have no file/line
 * location and no condition, so most accessors return neutral defaults and
 * most mutators are deliberate no-ops.
 *
 * @author Bartlomiej Laczkowski
 */
public class DBGpExceptionBreakpoint implements DBGpBreakpoint {

    // The wrapped platform breakpoint providing the exception name.
    private IPHPExceptionBreakpoint bp;

    /**
     * Creates new DBGp exception breakpoint.
     */
    public DBGpExceptionBreakpoint(IPHPExceptionBreakpoint bp) {
        this.bp = bp;
    }

    @Override
    public int getID() {
        // No DBGp-assigned id is tracked for exception breakpoints.
        return 0;
    }

    @Override
    public void setID(int id) {
        // ignore
    }

    @Override
    public IBreakpoint getBreakpoint() {
        return bp;
    }

    @Override
    public void setBreakpoint(IBreakpoint breakpoint) {
        // ignore
    }

    @Override
    public IFile getIFile() {
        // Exception breakpoints are not tied to a workspace file.
        return null;
    }

    @Override
    public String getFileName() {
        return null;
    }

    @Override
    public int getLineNumber() {
        return 0;
    }

    @Override
    public boolean isException() {
        // Always true: this adapter exists only for exception breakpoints.
        return true;
    }

    @Override
    public boolean isConditional() {
        return false;
    }

    @Override
    public boolean isConditionEnabled() {
        return false;
    }

    @Override
    public boolean hasConditionChanged() {
        return false;
    }

    @Override
    public void resetConditionChanged() {
        // ignore
    }

    @Override
    public String getExpression() {
        return null;
    }

    @Override
    public String getException() {
        // Delegates to the wrapped breakpoint's exception type name.
        return bp.getExceptionName();
    }
}
apache-2.0
jentfoo/aws-sdk-java
aws-java-sdk-appstream/src/main/java/com/amazonaws/services/appstream/model/transform/UpdateStackResultJsonUnmarshaller.java
2769
/*
 * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.appstream.model.transform;

import java.math.*;

import javax.annotation.Generated;

import com.amazonaws.services.appstream.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;

import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;

/**
 * UpdateStackResult JSON Unmarshaller
 *
 * <p>Generated code (do not hand-edit): walks the JSON token stream one level
 * below the current depth and populates the single "Stack" member.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class UpdateStackResultJsonUnmarshaller implements Unmarshaller<UpdateStackResult, JsonUnmarshallerContext> {

    public UpdateStackResult unmarshall(JsonUnmarshallerContext context) throws Exception {
        UpdateStackResult updateStackResult = new UpdateStackResult();

        // Fields at exactly originalDepth + 1 belong to this result object.
        int originalDepth = context.getCurrentDepth();
        String currentParentElement = context.getCurrentParentElement();
        int targetDepth = originalDepth + 1;

        JsonToken token = context.getCurrentToken();
        if (token == null)
            token = context.nextToken();
        // A JSON null means an empty result; return the fresh instance as-is.
        if (token == VALUE_NULL) {
            return updateStackResult;
        }

        while (true) {
            if (token == null)
                break;

            if (token == FIELD_NAME || token == START_OBJECT) {
                if (context.testExpression("Stack", targetDepth)) {
                    context.nextToken();
                    updateStackResult.setStack(StackJsonUnmarshaller.getInstance().unmarshall(context));
                }
            } else if (token == END_ARRAY || token == END_OBJECT) {
                // Stop once we have closed back out of the object we started in.
                if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
                    if (context.getCurrentDepth() <= originalDepth)
                        break;
                }
            }
            token = context.nextToken();
        }

        return updateStackResult;
    }

    private static UpdateStackResultJsonUnmarshaller instance;

    // Lazily created singleton; not synchronized (consistent with other
    // generated unmarshallers — duplicate instances are harmless).
    public static UpdateStackResultJsonUnmarshaller getInstance() {
        if (instance == null)
            instance = new UpdateStackResultJsonUnmarshaller();
        return instance;
    }
}
apache-2.0
rashidaligee/kylo
metadata/metadata-api/src/main/java/com/thinkbiganalytics/metadata/api/BaseProvider.java
1004
package com.thinkbiganalytics.metadata.api; /*- * #%L * thinkbig-metadata-api * %% * Copyright (C) 2017 ThinkBig Analytics * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.io.Serializable; import java.util.List; /** */ public interface BaseProvider<T, PK extends Serializable> { T create(T t); T findById(PK id); List<T> findAll(); T update(T t); void delete(T t); void deleteById(PK id); PK resolveId(Serializable fid); }
apache-2.0
submergerock/avatar-hadoop
build/hadoop-0.20.1-dev/src/hdfs/org/apache/hadoop/hdfs/job/JobProtocol.java
1950
package org.apache.hadoop.hdfs.job;

/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.io.File;
import java.io.IOException;
import java.util.HashMap;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.job.Job;
import org.apache.hadoop.hdfs.server.datanode.DataNode;
import org.apache.hadoop.hdfs.server.datanode.FSDatasetInterface;
import org.apache.hadoop.hdfs.server.namenode.NameNode;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.ipc.VersionedProtocol;

/** An client-datanode protocol for block recovery */
public interface JobProtocol {
    //public static final Log LOG = LogFactory.getLog(JobProtocol.class);

    /**
     * 3: add keepLength parameter.
     */
    public static final long versionID = 3L;

    /** Injects the Hadoop configuration used by the job. */
    public void setConfiguration(Configuration conf);

    /** Injects the NameNode the job operates against. */
    public void setNameNode(NameNode namenode);

    /** Injects the DataNode the job operates against. */
    public void setDataNode(DataNode datanode);

    /** Attaches the job instance to execute. */
    public void setJob(Job j);

    /** @return the configuration previously set. */
    public Configuration getConfiguration();

    /** @return the job previously set. */
    public Job getJob();

    // NOTE(review): result semantics are not visible here — presumably a
    // per-key status/output buffer produced by running the job; confirm
    // against implementations.
    public HashMap<String,StringBuffer> handle();

    /** Stops the running job. */
    public void stop();
}
apache-2.0
barancev/selenium3-html-suite-runner
src/main/java/ru/stqa/selenium/legrc/runner/steps/actions/OpenStep.java
802
package ru.stqa.selenium.legrc.runner.steps.actions;

import ru.stqa.selenium.legrc.runner.RunContext;
import ru.stqa.selenium.legrc.runner.Step;
import ru.stqa.selenium.legrc.runner.StepOutcome;
import ru.stqa.selenium.legrc.runner.steps.AbstractStep;
import ru.stqa.selenium.legrc.runner.steps.VoidOutcome;

import java.util.List;

/**
 * Step implementing the Selenium "open" command: navigates the browser to a URL.
 *
 * <p>Expects one argument (the URL) at index 1 of the args list; variable
 * placeholders in the URL are substituted from the run context at execution time.
 */
public class OpenStep extends AbstractStep {

    // Raw URL argument, possibly containing placeholders to substitute.
    private String url;

    public OpenStep(List<String> args) {
        // The superclass is told to expect 1 argument; args.get(0) is
        // presumably the command name — confirm against AbstractStep.
        super(args, 1);
        this.url = args.get(1);
    }

    /** Factory registered with the runner to instantiate this step type. */
    public static class Factory implements Step.Factory {
        @Override
        public Step create(List<String> args) {
            return new OpenStep(args);
        }
    }

    @Override
    public StepOutcome runInternal(RunContext ctx) {
        // Substitute context variables into the URL, then navigate.
        ctx.getSelenium().open(ctx.substitute(url));
        // "open" produces no value, only a side effect.
        return new VoidOutcome();
    }
}
apache-2.0
gstevey/gradle
subprojects/ide/src/main/java/org/gradle/plugins/ide/internal/tooling/BuildInvocationsBuilder.java
6072
/*
 * Copyright 2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.gradle.plugins.ide.internal.tooling;

import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Ordering;
import com.google.common.collect.Sets;
import org.gradle.api.GradleException;
import org.gradle.api.Project;
import org.gradle.api.Task;
import org.gradle.api.internal.project.ProjectTaskLister;
import org.gradle.api.internal.tasks.PublicTaskSpecification;
import org.gradle.plugins.ide.internal.tooling.model.DefaultBuildInvocations;
import org.gradle.plugins.ide.internal.tooling.model.LaunchableGradleTask;
import org.gradle.plugins.ide.internal.tooling.model.LaunchableGradleTaskSelector;
import org.gradle.tooling.internal.gradle.DefaultProjectIdentifier;
import org.gradle.tooling.internal.consumer.converters.TaskNameComparator;
import org.gradle.tooling.provider.model.ToolingModelBuilder;

import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;

import static org.gradle.plugins.ide.internal.tooling.ToolingModelBuilderSupport.buildFromTask;

/**
 * Tooling model builder for the {@code BuildInvocations} model: collects the
 * tasks of a project and the task selectors (task names aggregated across the
 * project and its subprojects) that an IDE can launch.
 */
public class BuildInvocationsBuilder implements ToolingModelBuilder {

    // Supplies the task list of a single project.
    private final ProjectTaskLister taskLister;
    // Orders task paths; used to pick a canonical description among same-named tasks.
    private final TaskNameComparator taskNameComparator;

    public BuildInvocationsBuilder(ProjectTaskLister taskLister) {
        this.taskLister = taskLister;
        this.taskNameComparator = new TaskNameComparator();
    }

    /** Only the BuildInvocations tooling model is supported. */
    @Override
    public boolean canBuild(String modelName) {
        return modelName.equals("org.gradle.tooling.model.gradle.BuildInvocations");
    }

    @Override
    @SuppressWarnings("StringEquality")
    public DefaultBuildInvocations buildAll(String modelName, Project project) {
        if (!canBuild(modelName)) {
            throw new GradleException("Unknown model name " + modelName);
        }

        DefaultProjectIdentifier projectIdentifier = getProjectIdentifier(project);
        // construct task selectors
        List<LaunchableGradleTaskSelector> selectors = Lists.newArrayList();
        Map<String, LaunchableGradleTaskSelector> selectorsByName = Maps.newTreeMap(Ordering.natural());
        Set<String> visibleTasks = Sets.newLinkedHashSet();
        findTasks(project, selectorsByName, visibleTasks);
        for (String selectorName : selectorsByName.keySet()) {
            LaunchableGradleTaskSelector selector = selectorsByName.get(selectorName);
            selectors.add(selector.
                    setName(selectorName).
                    setTaskName(selectorName).
                    setProjectIdentifier(projectIdentifier).
                    setDisplayName(selectorName + " in " + project + " and subprojects.").
                    setPublic(visibleTasks.contains(selectorName)));
        }

        // construct project tasks
        List<LaunchableGradleTask> projectTasks = tasks(project);

        // construct build invocations from task selectors and project tasks
        return new DefaultBuildInvocations()
                .setSelectors(selectors)
                .setTasks(projectTasks)
                .setProjectIdentifier(projectIdentifier);
    }

    private DefaultProjectIdentifier getProjectIdentifier(Project project) {
        return new DefaultProjectIdentifier(project.getRootDir(), project.getPath());
    }

    // build tasks without project reference
    private List<LaunchableGradleTask> tasks(Project project) {
        List<LaunchableGradleTask> tasks = Lists.newArrayList();
        for (Task task : taskLister.listProjectTasks(project)) {
            tasks.add(buildFromTask(new LaunchableGradleTask(), task));
        }
        return tasks;
    }

    // Depth-first walk over the project tree collecting, per task name, a
    // selector seeded from the task with the lexicographically smallest path,
    // plus the set of task names that are publicly visible anywhere in the tree.
    private void findTasks(Project project, Map<String, LaunchableGradleTaskSelector> taskSelectors, Collection<String> visibleTasks) {
        for (Project child : project.getChildProjects().values()) {
            findTasks(child, taskSelectors, visibleTasks);
        }
        for (Task task : taskLister.listProjectTasks(project)) {
            // in the map, store a minimally populated LaunchableGradleTaskSelector that contains just the description and the path
            // replace the LaunchableGradleTaskSelector stored in the map iff we come across a task with the same name whose path has a smaller ordering
            // this way, for each task selector, its description will be the one from the selected task with the 'smallest' path
            if (!taskSelectors.containsKey(task.getName())) {
                LaunchableGradleTaskSelector taskSelector = new LaunchableGradleTaskSelector().
                        setDescription(task.getDescription()).setPath(task.getPath());
                taskSelectors.put(task.getName(), taskSelector);
            } else {
                LaunchableGradleTaskSelector taskSelector = taskSelectors.get(task.getName());
                if (hasPathWithLowerOrdering(task, taskSelector)) {
                    taskSelector.setDescription(task.getDescription()).setPath(task.getPath());
                }
            }

            // visible tasks are specified as those that have a non-empty group
            if (PublicTaskSpecification.INSTANCE.isSatisfiedBy(task)) {
                visibleTasks.add(task.getName());
            }
        }
    }

    // True when this task's path sorts before the path already stored in the selector.
    private boolean hasPathWithLowerOrdering(Task task, LaunchableGradleTaskSelector referenceTaskSelector) {
        return taskNameComparator.compare(task.getPath(), referenceTaskSelector.getPath()) < 0;
    }
}
apache-2.0
before/quality-check
modules/quality-test/src/main/java/net/sf/qualitytest/blueprint/strategy/creation/RandomFloatValueCreationStrategy.java
1193
/*******************************************************************************
 * Copyright 2013 André Rouél and Dominik Seichter
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 ******************************************************************************/
package net.sf.qualitytest.blueprint.strategy.creation;

import java.util.Random;

/**
 * Strategy to create a random float value.
 *
 * <p>Each instance owns its own (unseeded) {@link Random}, so generated values
 * are not reproducible across runs.
 *
 * @author Dominik Seichter
 */
public class RandomFloatValueCreationStrategy extends ValueCreationStrategy<Float> {

    private final Random random = new Random();

    /**
     * Creates a uniformly distributed random float in [0.0, 1.0).
     * The expected class is ignored — this strategy always yields a Float.
     */
    @Override
    public Float createValue(final Class<?> expectedClass) {
        return Float.valueOf(random.nextFloat());
    }
}
apache-2.0
OpenWiseSolutions/openhub-framework
core/src/main/java/org/openhubframework/openhub/core/common/version/impl/ManifestVersionInfoSource.java
7860
/*
 * Copyright 2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.openhubframework.openhub.core.common.version.impl;

import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
import java.util.jar.Attributes;
import java.util.jar.Manifest;
import java.util.regex.PatternSyntaxException;
import javax.annotation.Nullable;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.core.io.Resource;
import org.springframework.stereotype.Component;
import org.springframework.web.context.WebApplicationContext;

import org.openhubframework.openhub.core.common.version.VersionInfo;
import org.openhubframework.openhub.core.common.version.VersionInfoSource;

/**
 * This class can be used to determine versions of application modules.
 * The version information is retrieved from manifest files (MANIFEST.MF)
 * that are available at classpath (JAR) and at servlet context path (WAR).
 *
 * @author Michal Palicka
 * @since 0.1
 */
@Component
public class ManifestVersionInfoSource implements VersionInfoSource, ApplicationContextAware {

    private static final Logger LOG = LoggerFactory.getLogger(ManifestVersionInfoSource.class);

    //----------------------------------------------------------------------
    // class (static) fields
    //----------------------------------------------------------------------

    // All constants made 'final' — they were mutable static fields before.
    private static final String MANIFEST_CLASSPATH_RESOURCE_NAME = "classpath*:/META-INF/MANIFEST.MF";

    private static final String MANIFEST_CONTEXT_RESOURCE_NAME = "/META-INF/MANIFEST.MF";

    private static final String ATTR_TITLE = "Implementation-Title";

    private static final String ATTR_VENDOR_ID = "Implementation-Vendor-Id";

    private static final String ATTR_VERSION = "Implementation-Version";

    private static final String ATTR_REVISION = "Implementation-Build";

    private static final String ATTR_TIMESTAMP = "Implementation-Timestamp";

    private static final String ATTR_BUNDLE_SYMBOLIC_NAME = "Bundle-SymbolicName";

    private static final String ATTR_BUNDLE_NAME = "Bundle-Name";

    private static final String ATTR_BUNDLE_VERSION = "Bundle-Version";

    //----------------------------------------------------------------------
    // instance fields
    //----------------------------------------------------------------------

    private ApplicationContext applicationContext;

    //----------------------------------------------------------------------
    // public methods
    //----------------------------------------------------------------------

    @Override
    public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
        this.applicationContext = applicationContext;
    }

    //----------------------------------------------------------------------
    // VersionInfoSource
    //----------------------------------------------------------------------

    /**
     * Retrieves version information from all manifests that are available at classpath.
     * In many cases, the manifest attributes are not properly set and the version information may be incomplete.
     * The method allows to specify a filter that can be used to remove invalid entries from the result.
     * <p>
     * The filter is also an instance of {@code VersionInfo}, but its fields are expected to contain regular
     * expressions instead of plain values.
     * Each available version entry is matched against patterns in the filter (field-by-field).
     * If any of the fields does not match, the version entry is excluded from the result.
     * <p>
     * If the <em>filter</em> is {@code null}, all entries are returned.
     * <p>
     * If a field in the filter is set to {@code null}, then all values are allowed.
     *
     * @param filter the filter used to remove invalid or unwanted entries.
     * @return an array of version entries (in ascending order).
     */
    @Override
    public VersionInfo[] getVersionInformation(@Nullable VersionInfo filter) {
        // use set to remove duplicate entries
        Set<VersionInfo> result = new HashSet<VersionInfo>();
        try {
            // manifest files at classpath (JARs)
            result.addAll(getVersionData(MANIFEST_CLASSPATH_RESOURCE_NAME, filter));

            if (applicationContext instanceof WebApplicationContext) {
                // manifest file from servlet context (WAR)
                result.addAll(getVersionData(MANIFEST_CONTEXT_RESOURCE_NAME, filter));
            }
        } catch (Exception e) {
            LOG.warn("Unable to retrieve version information: {}", e.getMessage());
        }
        return result.toArray(new VersionInfo[result.size()]);
    }

    //----------------------------------------------------------------------
    // private methods
    //----------------------------------------------------------------------

    // Loads all manifests matching 'location', converts each to a VersionInfo
    // and keeps only entries accepted by the (optional) filter.
    private Collection<VersionInfo> getVersionData(String location, @Nullable VersionInfo filter)
            throws IOException, PatternSyntaxException {
        Collection<VersionInfo> result = new ArrayList<VersionInfo>();
        Resource[] resources = applicationContext.getResources(location);
        for (Resource resource : resources) {
            try {
                InputStream is = resource.getInputStream();
                if (is != null) {
                    Manifest manifest = new Manifest(is);
                    VersionInfo info = createVersionInfo(manifest);
                    // Version entries may be empty or incomplete.
                    // Return only entries that match the specified filter.
                    if (info.matches(filter)) {
                        result.add(info);
                    }
                }
            } catch (IOException e) {
                // BUGFIX: the throwable was passed as a *middle* argument, so SLF4J
                // substituted the exception into the '{}' placeholder, dropped the
                // resource URL and lost the stack trace. The throwable must be last.
                LOG.error("Unable to process manifest resource '{}'", resource.getURL(), e);
                throw e;
            } catch (PatternSyntaxException e) {
                // BUGFIX: same argument-order problem as above; note this branch
                // deliberately swallows the error and continues with other resources.
                LOG.error("Unable to process version data, invalid filter '{}'",
                        filter != null ? filter.toString() : null, e);
            }
        }
        return result;
    }

    // Maps the manifest's main attributes onto a VersionInfo, preferring the
    // Implementation-* attributes and falling back to OSGi Bundle-* attributes.
    private VersionInfo createVersionInfo(Manifest manifest) {
        Attributes attrs = manifest.getMainAttributes();
        return new VersionInfo(getTitle(attrs),
                getVendorId(attrs),
                getVersion(attrs),
                attrs.getValue(ATTR_REVISION),
                attrs.getValue(ATTR_TIMESTAMP));
    }

    private String getVersion(Attributes attrs) {
        String version = attrs.getValue(ATTR_VERSION);
        if (version == null) {
            version = attrs.getValue(ATTR_BUNDLE_VERSION);
        }
        return version;
    }

    private String getTitle(Attributes attrs) {
        String title = attrs.getValue(ATTR_TITLE);
        if (title == null) {
            title = attrs.getValue(ATTR_BUNDLE_NAME);
        }
        return title;
    }

    private String getVendorId(Attributes attrs) {
        String vendorId = attrs.getValue(ATTR_VENDOR_ID);
        if (vendorId == null) {
            vendorId = attrs.getValue(ATTR_BUNDLE_SYMBOLIC_NAME);
        }
        return vendorId;
    }
}
apache-2.0
jentfoo/aws-sdk-java
aws-java-sdk-ssm/src/main/java/com/amazonaws/services/simplesystemsmanagement/model/MaintenanceWindowResourceType.java
1899
/*
 * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.simplesystemsmanagement.model;

import javax.annotation.Generated;

/**
 * Resource types a maintenance window can target (generated code — do not hand-edit).
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public enum MaintenanceWindowResourceType {

    INSTANCE("INSTANCE"),
    RESOURCE_GROUP("RESOURCE_GROUP");

    // Wire value sent to / received from the service.
    private String value;

    private MaintenanceWindowResourceType(String value) {
        this.value = value;
    }

    @Override
    public String toString() {
        return this.value;
    }

    /**
     * Use this in place of valueOf.
     *
     * @param value
     *        real value
     * @return MaintenanceWindowResourceType corresponding to the value
     *
     * @throws IllegalArgumentException
     *         If the specified value does not map to one of the known values in this enum.
     */
    public static MaintenanceWindowResourceType fromValue(String value) {
        if (value == null || "".equals(value)) {
            throw new IllegalArgumentException("Value cannot be null or empty!");
        }

        // Linear scan over the wire values; valueOf would match constant names instead.
        for (MaintenanceWindowResourceType enumEntry : MaintenanceWindowResourceType.values()) {
            if (enumEntry.toString().equals(value)) {
                return enumEntry;
            }
        }

        throw new IllegalArgumentException("Cannot create enum from " + value + " value!");
    }
}
apache-2.0
kanomiya/CradleOfNoesis
src/main/java/com/kanomiya/mcmod/cradleofnoesis/sanctuary/DecaySanctuary.java
2838
package com.kanomiya.mcmod.cradleofnoesis.sanctuary;

import java.util.List;

import net.minecraft.client.resources.I18n;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.util.math.BlockPos;
import net.minecraft.util.math.Vec3d;
import net.minecraft.world.World;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;

import com.kanomiya.mcmod.cradleofnoesis.api.sanctuary.SimpleSanctuary;

/**
 * A sanctuary that periodically destroys random blocks within its radius.
 * Every {@code interval} ticks it picks {@code decayAmount} random positions
 * around its center and destroys the block there (without drops).
 *
 * @author Kanomiya
 *
 */
public class DecaySanctuary extends SimpleSanctuary
{
	// interval: ticks between decay bursts; timer: ticks elapsed since the last burst
	protected int interval, timer;
	// number of block-destruction attempts per burst
	protected int decayAmount;

	public DecaySanctuary()
	{
		// No-arg form used by deserialization; real values come from deserializeNBT.
		this(0f,0,0,0);
	}

	public DecaySanctuary(float radius, int maxAge, int decayAmount, int interval)
	{
		// 0xAAA03F8C is the sanctuary's display color (ARGB)
		super(radius, maxAge, 0xAAA03F8C);

		this.decayAmount = decayAmount;
		this.interval = interval;

		setUnlocalizedName("decaySanctuary");
	}

	@Override
	public void onUpdate(World worldIn, double posX, double posY, double posZ)
	{
		super.onUpdate(worldIn, posX, posY, posZ);

		++timer;

		if (interval < timer)
		{
			Vec3d vecPos = new Vec3d(posX, posY, posZ);

			for (int i=0; i<decayAmount; ++i)
			{
				// NOTE(review): x and y use nextGaussian()*radius*2 - radius while z uses
				// nextGaussian()*radius - radius, and a Gaussian sample is scaled as if it
				// were uniform in [0,1) — possibly unintended asymmetry/bias. TODO confirm intent.
				Vec3d vecTgt = vecPos.addVector(worldIn.rand.nextGaussian() *radius*2 -radius, worldIn.rand.nextGaussian() *radius*2 -radius, worldIn.rand.nextGaussian() *radius -radius);

				// Only destroy blocks that fall inside the spherical radius.
				if (vecPos.subtract(vecTgt).lengthVector() <= radius)
				{
					BlockPos blockPos = new BlockPos(vecTgt);

					if (worldIn.getTileEntity(blockPos) == null) // skip tile-entity blocks so we don't break them (original note, translated from Japanese)
					{
						worldIn.destroyBlock(blockPos, false);
					}
				}

			}

			timer = 0;
		}

	}

	@Override
	@SideOnly(Side.CLIENT)
	public void addInformation(List<String> tooltip, boolean advanced)
	{
		super.addInformation(tooltip, advanced);

		tooltip.add(I18n.format("vocabulary.sanctuary.interval") + ": " + getInterval());
		tooltip.add(I18n.format("vocabulary.sanctuary.decayAmount") + ": " + getDecayAmount());
	}

	/** @return number of block-destruction attempts per burst */
	public int getDecayAmount()
	{
		return decayAmount;
	}

	/** @param decayAmount number of block-destruction attempts per burst */
	public void setDecayAmount(int decayAmount)
	{
		this.decayAmount = decayAmount;
	}

	/**
	 * @return interval in ticks between decay bursts
	 */
	public int getInterval()
	{
		return interval;
	}

	/**
	 * @param interval interval in ticks between decay bursts
	 */
	public void setInterval(int interval)
	{
		this.interval = interval;
	}

	/**
	 * Serializes interval, timer, and decayAmount on top of the superclass data.
	 *
	 * {@inheritDoc}
	 */
	@Override
	public NBTTagCompound serializeNBT()
	{
		NBTTagCompound nbt = super.serializeNBT();

		nbt.setInteger("interval", interval);
		nbt.setInteger("timer", timer);
		nbt.setInteger("decayAmount", decayAmount);

		return nbt;
	}

	/**
	 * Restores interval, timer, and decayAmount on top of the superclass data.
	 *
	 * {@inheritDoc}
	 */
	@Override
	public void deserializeNBT(NBTTagCompound nbt)
	{
		super.deserializeNBT(nbt);

		interval = nbt.getInteger("interval");
		timer = nbt.getInteger("timer");
		decayAmount = nbt.getInteger("decayAmount");
	}

}
apache-2.0
afs/rdf-delta
rdf-delta-cmds/src/main/java/org/seaborne/delta/cmds/DeltaCmd.java
6361
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * See the NOTICE file distributed with this work for additional
 * information regarding copyright ownership.
 */

package org.seaborne.delta.cmds;

import java.util.List ;
import java.util.Optional ;

import org.apache.commons.lang3.StringUtils ;
import org.apache.jena.atlas.web.HttpException ;
import org.apache.jena.cmd.ArgDecl;
import org.apache.jena.cmd.CmdException;
import org.apache.jena.cmd.CmdGeneral;
import org.seaborne.delta.DataSourceDescription ;
import org.seaborne.delta.DeltaOps ;
import org.seaborne.delta.Id ;
import org.seaborne.delta.PatchLogInfo;
import org.seaborne.delta.client.DeltaLinkHTTP ;
import org.seaborne.delta.link.DeltaLink ;

/**
 * Base class for delta command-line tools. Handles the common arguments
 * ({@code --server}, {@code --log}/{@code --dsrc}/{@code --dataset},
 * {@code --uri}/{@code --dsrcuri}), validates them, and establishes the
 * {@link DeltaLink} connection before delegating to {@link #execCmd()}.
 */
abstract public class DeltaCmd extends CmdGeneral {

    // Environment variable, for commands, for the remote server to work with.
    public static final String ENV_SERVER_URL = "DELTA_SERVER_URL";

    /** Characters that are not allowed in a data source name. */
    private static final String ILLEGAL_NAME_CHARS = "/ ?#";

    static ArgDecl argServer        = new ArgDecl(true, "server");
    static ArgDecl argLogName       = new ArgDecl(true, "log", "dsrc", "dataset");
    static ArgDecl argDataSourceURI = new ArgDecl(true, "uri", "dsrcuri");

    public DeltaCmd(String[] argv) {
        super(argv) ;
        super.add(argServer);
    }

    @Override
    final protected void processModulesAndArgs() {
        // Set a default for serverURL from the environment; an explicit --server overrides it below.
        serverURL = System.getenv(ENV_SERVER_URL);
        super.processModulesAndArgs();

        if ( serverURL == null && ! contains(argServer) )
            throw new CmdException("Required: --server URL");

        checkForMandatoryArgs();

        if ( contains(argServer) ) {
            serverURL = getValue(argServer);
        }

        if ( contains(argLogName) ) {
            dataSourceName = getValue(argLogName);

            if ( dataSourceName.isEmpty() )
                throw new CmdException("Empty string for data source name");

            if ( StringUtils.containsAny(dataSourceName, ILLEGAL_NAME_CHARS) ) {
                // Report the first bad character.
                // BUG FIX: previously "indexOfAny(serverURL, dataSourceName)" searched the
                // server URL for characters of the name; we want the first illegal
                // character within the data source name itself.
                int idx = StringUtils.indexOfAny(dataSourceName, ILLEGAL_NAME_CHARS);
                char ch = dataSourceName.charAt(idx);
                throw new CmdException(String.format("Illegal character '%c' in data source name: '%s'", ch, dataSourceName));
            }
            if ( ! DeltaOps.isValidName(dataSourceName) )
                throw new CmdException("Not a valid data source name: '"+dataSourceName+"'");

            // Derive the data source URI from the server URL and the name.
            String s = serverURL;
            if ( ! s.endsWith("/") )
                s = s+"/";
            dataSourceURI = s + dataSourceName;
        }
        if ( contains(argDataSourceURI) ) {
            dataSourceURI = getValue(argDataSourceURI);
            if ( StringUtils.containsAny(dataSourceURI, "<>?#") )
                throw new CmdException("Bad data source URI: '"+dataSourceURI+"'");
        }
        dLink = DeltaLinkHTTP.connect(serverURL);
    }

    /** Hook for subclasses to check their own required arguments. Called during argument processing. */
    protected abstract void checkForMandatoryArgs();

    protected String serverURL      = null ;
    protected String dataSourceName = null ;
    protected String dataSourceURI  = null ;
    protected DeltaLink dLink       = null ;
    protected Id clientId           = Id.create() ;

    @Override
    protected void exec() {
        try {
            execCmd();
        }
        catch (HttpException ex) {
            // Surface the underlying HTTP failure as a command error.
            throw new CmdException(messageFromHttpException(ex)) ;
        }
    }

    @Override
    protected String getCommandName() {
        String name = this.getClass().getSimpleName();
        // Anonymous classes have an empty (or bracketed) simple name.
        switch (name) {
            case "":   name = "anon"; break;
            case "[]": name = "anon[]"; break;
        }
        return name;
    }

    /** Command body, run after arguments are validated and {@link #dLink} is connected. */
    protected abstract void execCmd();

    /** @return descriptions of all data sources known to the server */
    protected List<DataSourceDescription> getDescriptions() {
        return dLink.listDescriptions();
    }

    /** @return patch log information for all data sources known to the server */
    protected List<PatchLogInfo> getPatchLogInfo() {
        return dLink.listPatchLogInfo();
    }

    /**
     * Get the description of the data source named by the command arguments
     * (by name if given, else by URI).
     *
     * @throws CmdException if the source does not exist or neither name nor URI was given
     */
    protected DataSourceDescription getDescription() {
        if ( dataSourceName != null ) {
            DataSourceDescription dsd = dLink.getDataSourceDescriptionByName(dataSourceName);
            if ( dsd != null )
                return dsd;
            throw new CmdException("Source '"+dataSourceName+"' does not exist");
        }
        if ( dataSourceURI != null ) {
            DataSourceDescription dsd = dLink.getDataSourceDescriptionByURI(dataSourceURI);
            if ( dsd != null )
                return dsd;
            throw new CmdException("Source '"+dataSourceURI+"' does not exist");
        }
        throw new CmdException("No data source name and no data source URI");
    }

    /** Extract a human-readable message from an HTTP exception, preferring the root cause. */
    protected String messageFromHttpException(HttpException ex) {
        Throwable ex2 = ex;
        if ( ex.getCause() != null )
            ex2 = ex.getCause();
        return ex2.getMessage();
    }

    /**
     * Find a data source description by name or by URI; name takes precedence.
     *
     * @throws CmdException if both arguments are null
     */
    protected Optional<DataSourceDescription> find(String name, String url) {
        if ( name != null )
            return findByName(name);
        if ( url != null )
            // BUG FIX: previously called findByName(url) on the URL branch.
            return findByURI(url);
        throw new CmdException("No name or URI for the source");
    }

    /** Find a data source description by URI; empty if not found. */
    protected Optional<DataSourceDescription> findByURI(String uri) {
        DataSourceDescription dsd = dLink.getDataSourceDescriptionByURI(uri);
        return Optional.ofNullable(dsd);
    }

    /** Find a data source description by name; empty if not found. */
    protected Optional<DataSourceDescription> findByName(String name) {
        DataSourceDescription dsd = dLink.getDataSourceDescriptionByName(name);
        return Optional.ofNullable(dsd);
    }
}
apache-2.0
OpenGamma/Strata
modules/product/src/test/java/com/opengamma/strata/product/fra/FraTest.java
12286
/*
 * Copyright (C) 2015 - present by OpenGamma Inc. and the OpenGamma group of companies
 *
 * Please see distribution for license.
 */
package com.opengamma.strata.product.fra;

import static com.opengamma.strata.basics.currency.Currency.AUD;
import static com.opengamma.strata.basics.currency.Currency.GBP;
import static com.opengamma.strata.basics.currency.Currency.NZD;
import static com.opengamma.strata.basics.currency.Currency.USD;
import static com.opengamma.strata.basics.date.BusinessDayConventions.MODIFIED_FOLLOWING;
import static com.opengamma.strata.basics.date.DayCounts.ACT_360;
import static com.opengamma.strata.basics.date.DayCounts.ACT_365F;
import static com.opengamma.strata.basics.date.HolidayCalendarIds.GBLO;
import static com.opengamma.strata.basics.date.HolidayCalendarIds.SAT_SUN;
import static com.opengamma.strata.basics.date.Tenor.TENOR_3M;
import static com.opengamma.strata.basics.index.IborIndices.GBP_LIBOR_2M;
import static com.opengamma.strata.basics.index.IborIndices.GBP_LIBOR_3M;
import static com.opengamma.strata.collect.TestHelper.assertSerialization;
import static com.opengamma.strata.collect.TestHelper.coverBeanEquals;
import static com.opengamma.strata.collect.TestHelper.coverImmutableBean;
import static com.opengamma.strata.collect.TestHelper.date;
import static com.opengamma.strata.product.common.BuySell.BUY;
import static com.opengamma.strata.product.common.BuySell.SELL;
import static com.opengamma.strata.product.fra.FraDiscountingMethod.AFMA;
import static com.opengamma.strata.product.fra.FraDiscountingMethod.ISDA;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException;
import static org.assertj.core.data.Offset.offset;

import java.time.LocalTime;
import java.time.ZoneId;
import java.util.Optional;

import org.junit.jupiter.api.Test;

import com.opengamma.strata.basics.ReferenceData;
import com.opengamma.strata.basics.date.AdjustableDate;
import com.opengamma.strata.basics.date.BusinessDayAdjustment;
import com.opengamma.strata.basics.date.DaysAdjustment;
import com.opengamma.strata.basics.date.TenorAdjustment;
import com.opengamma.strata.basics.index.ImmutableIborIndex;
import com.opengamma.strata.product.rate.IborInterpolatedRateComputation;
import com.opengamma.strata.product.rate.IborRateComputation;

/**
 * Test {@code Fra}: builder defaulting rules (currency, day count, discounting,
 * fixing offset), builder validation, and resolution to {@code ResolvedFra}.
 */
public class FraTest {

  private static final ReferenceData REF_DATA = ReferenceData.standard();
  private static final double NOTIONAL_1M = 1_000_000d;
  private static final double NOTIONAL_2M = 2_000_000d;
  private static final double FIXED_RATE = 0.025d;
  private static final BusinessDayAdjustment BDA_MOD_FOLLOW = BusinessDayAdjustment.of(MODIFIED_FOLLOWING, GBLO);
  private static final DaysAdjustment PLUS_TWO_DAYS = DaysAdjustment.ofBusinessDays(2, GBLO);
  private static final DaysAdjustment MINUS_TWO_DAYS = DaysAdjustment.ofBusinessDays(-2, GBLO);
  private static final DaysAdjustment MINUS_FIVE_DAYS = DaysAdjustment.ofBusinessDays(-5, GBLO);

  //-------------------------------------------------------------------------
  // checks that all unset builder fields default from the GBP LIBOR index
  @Test
  public void test_builder() {
    Fra test = sut();
    assertThat(test.getBuySell()).isEqualTo(BUY);
    assertThat(test.getCurrency()).isEqualTo(GBP);  // defaulted
    assertThat(test.getNotional()).isCloseTo(NOTIONAL_1M, offset(0d));
    assertThat(test.getStartDate()).isEqualTo(date(2015, 6, 15));
    assertThat(test.getEndDate()).isEqualTo(date(2015, 9, 15));
    assertThat(test.getBusinessDayAdjustment()).isEqualTo(Optional.empty());
    assertThat(test.getPaymentDate()).isEqualTo(AdjustableDate.of(date(2015, 6, 15)));
    assertThat(test.getFixedRate()).isCloseTo(FIXED_RATE, offset(0d));
    assertThat(test.getIndex()).isEqualTo(GBP_LIBOR_3M);
    assertThat(test.getIndexInterpolated()).isEqualTo(Optional.empty());
    assertThat(test.getFixingDateOffset()).isEqualTo(GBP_LIBOR_3M.getFixingDateOffset());  // defaulted
    assertThat(test.getDayCount()).isEqualTo(ACT_365F);  // defaulted
    assertThat(test.getDiscounting()).isEqualTo(ISDA);  // defaulted
    assertThat(test.isCrossCurrency()).isFalse();
    assertThat(test.allPaymentCurrencies()).containsOnly(GBP);
    assertThat(test.allCurrencies()).containsOnly(GBP);
  }

  // checks that an AUD index defaults currency to AUD and discounting to AFMA
  @Test
  public void test_builder_AUD() {
    ImmutableIborIndex dummyIndex = ImmutableIborIndex.builder()
        .name("AUD-INDEX-3M")
        .currency(AUD)
        .dayCount(ACT_360)
        .fixingDateOffset(MINUS_TWO_DAYS)
        .effectiveDateOffset(PLUS_TWO_DAYS)
        .maturityDateOffset(TenorAdjustment.ofLastDay(TENOR_3M, BDA_MOD_FOLLOW))
        .fixingCalendar(SAT_SUN)
        .fixingTime(LocalTime.NOON)
        .fixingZone(ZoneId.of("Australia/Sydney"))
        .build();
    Fra test = Fra.builder()
        .buySell(BUY)
        .notional(NOTIONAL_1M)
        .startDate(date(2015, 6, 15))
        .endDate(date(2015, 9, 15))
        .paymentDate(AdjustableDate.of(date(2015, 6, 16)))
        .fixedRate(FIXED_RATE)
        .index(dummyIndex)
        .fixingDateOffset(MINUS_TWO_DAYS)
        .build();
    assertThat(test.getBuySell()).isEqualTo(BUY);
    assertThat(test.getCurrency()).isEqualTo(AUD);  // defaulted
    assertThat(test.getNotional()).isCloseTo(NOTIONAL_1M, offset(0d));
    assertThat(test.getStartDate()).isEqualTo(date(2015, 6, 15));
    assertThat(test.getEndDate()).isEqualTo(date(2015, 9, 15));
    assertThat(test.getBusinessDayAdjustment()).isEqualTo(Optional.empty());
    assertThat(test.getPaymentDate()).isEqualTo(AdjustableDate.of(date(2015, 6, 16)));
    assertThat(test.getFixedRate()).isCloseTo(FIXED_RATE, offset(0d));
    assertThat(test.getIndex()).isEqualTo(dummyIndex);
    assertThat(test.getIndexInterpolated()).isEqualTo(Optional.empty());
    assertThat(test.getFixingDateOffset()).isEqualTo(MINUS_TWO_DAYS);
    assertThat(test.getDayCount()).isEqualTo(ACT_360);  // defaulted
    assertThat(test.getDiscounting()).isEqualTo(AFMA);  // defaulted
  }

  // checks that an NZD index defaults currency to NZD and discounting to AFMA
  @Test
  public void test_builder_NZD() {
    ImmutableIborIndex dummyIndex = ImmutableIborIndex.builder()
        .name("NZD-INDEX-3M")
        .currency(NZD)
        .dayCount(ACT_360)
        .fixingDateOffset(MINUS_TWO_DAYS)
        .effectiveDateOffset(PLUS_TWO_DAYS)
        .maturityDateOffset(TenorAdjustment.ofLastDay(TENOR_3M, BDA_MOD_FOLLOW))
        .fixingCalendar(SAT_SUN)
        .fixingTime(LocalTime.NOON)
        .fixingZone(ZoneId.of("NZ"))
        .build();
    Fra test = Fra.builder()
        .buySell(BUY)
        .notional(NOTIONAL_1M)
        .startDate(date(2015, 6, 15))
        .endDate(date(2015, 9, 15))
        .paymentDate(AdjustableDate.of(date(2015, 6, 16)))
        .fixedRate(FIXED_RATE)
        .index(dummyIndex)
        .fixingDateOffset(MINUS_TWO_DAYS)
        .build();
    assertThat(test.getBuySell()).isEqualTo(BUY);
    assertThat(test.getCurrency()).isEqualTo(NZD);  // defaulted
    assertThat(test.getNotional()).isCloseTo(NOTIONAL_1M, offset(0d));
    assertThat(test.getStartDate()).isEqualTo(date(2015, 6, 15));
    assertThat(test.getEndDate()).isEqualTo(date(2015, 9, 15));
    assertThat(test.getBusinessDayAdjustment()).isEqualTo(Optional.empty());
    assertThat(test.getPaymentDate()).isEqualTo(AdjustableDate.of(date(2015, 6, 16)));
    assertThat(test.getFixedRate()).isCloseTo(FIXED_RATE, offset(0d));
    assertThat(test.getIndex()).isEqualTo(dummyIndex);
    assertThat(test.getIndexInterpolated()).isEqualTo(Optional.empty());
    assertThat(test.getFixingDateOffset()).isEqualTo(MINUS_TWO_DAYS);
    assertThat(test.getDayCount()).isEqualTo(ACT_360);  // defaulted
    assertThat(test.getDiscounting()).isEqualTo(AFMA);  // defaulted
  }

  // end date before start date must be rejected
  @Test
  public void test_builder_datesInOrder() {
    assertThatIllegalArgumentException()
        .isThrownBy(() -> Fra.builder()
            .buySell(BUY)
            .notional(NOTIONAL_1M)
            .startDate(date(2015, 6, 15))
            .endDate(date(2015, 6, 14))
            .fixedRate(FIXED_RATE)
            .index(GBP_LIBOR_3M)
            .build());
  }

  // the index is mandatory
  @Test
  public void test_builder_noIndex() {
    assertThatIllegalArgumentException()
        .isThrownBy(() -> Fra.builder()
            .buySell(BUY)
            .notional(NOTIONAL_1M)
            .startDate(date(2015, 6, 15))
            .endDate(date(2015, 9, 15))
            .fixedRate(FIXED_RATE)
            .build());
  }

  // the start date is mandatory
  @Test
  public void test_builder_noDates() {
    assertThatIllegalArgumentException()
        .isThrownBy(() -> Fra.builder()
            .buySell(BUY)
            .notional(NOTIONAL_1M)
            .endDate(date(2015, 9, 15))
            .fixedRate(FIXED_RATE)
            .index(GBP_LIBOR_3M)
            .build());
  }

  //-------------------------------------------------------------------------
  // resolve() with a single Ibor index: payment date is business-day adjusted
  @Test
  public void test_resolve_Ibor() {
    Fra fra = Fra.builder()
        .buySell(BUY)
        .notional(NOTIONAL_1M)
        .startDate(date(2015, 6, 15))
        .endDate(date(2015, 9, 15))
        .paymentDate(AdjustableDate.of(date(2015, 6, 20), BDA_MOD_FOLLOW))
        .fixedRate(FIXED_RATE)
        .index(GBP_LIBOR_3M)
        .fixingDateOffset(MINUS_TWO_DAYS)
        .build();
    ResolvedFra test = fra.resolve(REF_DATA);
    assertThat(test.getCurrency()).isEqualTo(GBP);
    assertThat(test.getNotional()).isCloseTo(NOTIONAL_1M, offset(0d));
    assertThat(test.getStartDate()).isEqualTo(date(2015, 6, 15));
    assertThat(test.getEndDate()).isEqualTo(date(2015, 9, 15));
    assertThat(test.getPaymentDate()).isEqualTo(date(2015, 6, 22));
    assertThat(test.getFixedRate()).isCloseTo(FIXED_RATE, offset(0d));
    assertThat(test.getFloatingRate()).isEqualTo(IborRateComputation.of(GBP_LIBOR_3M, date(2015, 6, 11), REF_DATA));
    assertThat(test.getYearFraction()).isCloseTo(ACT_365F.yearFraction(date(2015, 6, 15), date(2015, 9, 15)), offset(0d));
    assertThat(test.getDiscounting()).isEqualTo(ISDA);
  }

  // resolve() with two indices: an interpolated rate computation; SELL negates the notional
  @Test
  public void test_resolve_IborInterpolated() {
    Fra fra = Fra.builder()
        .buySell(SELL)
        .notional(NOTIONAL_1M)
        .startDate(date(2015, 6, 12))
        .endDate(date(2015, 9, 5))
        .businessDayAdjustment(BDA_MOD_FOLLOW)
        .fixedRate(FIXED_RATE)
        .index(GBP_LIBOR_3M)
        .indexInterpolated(GBP_LIBOR_2M)
        .fixingDateOffset(MINUS_TWO_DAYS)
        .build();
    ResolvedFra test = fra.resolve(REF_DATA);
    assertThat(test.getCurrency()).isEqualTo(GBP);
    assertThat(test.getNotional()).isCloseTo(-NOTIONAL_1M, offset(0d));  // sell
    assertThat(test.getStartDate()).isEqualTo(date(2015, 6, 12));
    assertThat(test.getEndDate()).isEqualTo(date(2015, 9, 7));
    assertThat(test.getPaymentDate()).isEqualTo(date(2015, 6, 12));
    assertThat(test.getFixedRate()).isCloseTo(FIXED_RATE, offset(0d));
    assertThat(test.getFloatingRate())
        .isEqualTo(IborInterpolatedRateComputation.of(GBP_LIBOR_2M, GBP_LIBOR_3M, date(2015, 6, 10), REF_DATA));
    assertThat(test.getYearFraction()).isCloseTo(ACT_365F.yearFraction(date(2015, 6, 12), date(2015, 9, 7)), offset(0d));
    assertThat(test.getDiscounting()).isEqualTo(ISDA);
  }

  //-------------------------------------------------------------------------
  // bean coverage: getters, equals/hashCode and toString
  @Test
  public void coverage() {
    coverImmutableBean(sut());
    coverBeanEquals(sut(), sut2());
  }

  // round-trips the bean through Java serialization
  @Test
  public void test_serialization() {
    assertSerialization(sut());
  }

  //-------------------------------------------------------------------------
  // minimal FRA relying on builder defaults
  static Fra sut() {
    return Fra.builder()
        .buySell(BUY)
        .notional(NOTIONAL_1M)
        .startDate(date(2015, 6, 15))
        .endDate(date(2015, 9, 15))
        .fixedRate(FIXED_RATE)
        .index(GBP_LIBOR_3M)
        .build();
  }

  // fully-specified FRA, different in every property from sut(), for equals coverage
  static Fra sut2() {
    return Fra.builder()
        .buySell(SELL)
        .currency(USD)
        .notional(NOTIONAL_2M)
        .startDate(date(2015, 6, 16))
        .endDate(date(2015, 8, 17))
        .businessDayAdjustment(BDA_MOD_FOLLOW)
        .paymentDate(AdjustableDate.of(date(2015, 6, 17)))
        .dayCount(ACT_360)
        .fixedRate(0.30d)
        .index(GBP_LIBOR_2M)
        .indexInterpolated(GBP_LIBOR_3M)
        .fixingDateOffset(MINUS_FIVE_DAYS)
        .discounting(FraDiscountingMethod.NONE)
        .build();
  }

}
apache-2.0
boneman1231/org.apache.felix
trunk/ipojo/tests/core/annotations/src/main/java/org/apache/felix/ipojo/test/scenarios/annotations/TemporalDependencies.java
7964
package org.apache.felix.ipojo.test.scenarios.annotations;

import org.apache.felix.ipojo.junit4osgi.OSGiTestCase;
import org.apache.felix.ipojo.junit4osgi.helpers.IPOJOHelper;
import org.apache.felix.ipojo.metadata.Element;

/**
 * Checks that temporal-dependency annotations on test components are turned into
 * the expected "requires"/"temporal" metadata elements of the
 * org.apache.felix.ipojo.handler.temporal handler (field, timeout, onTimeout,
 * filter, proxy, and collection variants).
 */
public class TemporalDependencies extends OSGiTestCase {

    // Helper giving access to the parsed component metadata.
    private IPOJOHelper helper;

    public void setUp() {
        helper = new IPOJOHelper(this);
    }

    // plain temporal dependency: field set, no timeout, no onTimeout policy
    public void testSimple() {
        Element meta = helper.getMetadata("org.apache.felix.ipojo.test.scenarios.component.temporal.TemporalSimple");
        Element[] provs = meta.getElements("requires", "org.apache.felix.ipojo.handler.temporal");
        assertNotNull("Temporal exists ", provs);
        String field = provs[0].getAttribute("field");
        assertNotNull("Field not null", field);
        assertEquals("Field is fs", "fs", field);
        String to = provs[0].getAttribute("timeout");
        assertNull("No timeout", to);
        String oto = provs[0].getAttribute("onTimeout");
        assertNull("No onTimeout", oto);
    }

    // same component, but declared via the dedicated "temporal" element name
    public void testTemporal() {
        Element meta = helper.getMetadata("org.apache.felix.ipojo.test.scenarios.component.temporal.Temporal");
        Element[] provs = meta.getElements("temporal", "org.apache.felix.ipojo.handler.temporal");
        assertNotNull("Temporal exists ", provs);
        String field = provs[0].getAttribute("field");
        assertNotNull("Field not null", field);
        assertEquals("Field is fs", "fs", field);
        String to = provs[0].getAttribute("timeout");
        assertNull("No timeout", to);
        String oto = provs[0].getAttribute("onTimeout");
        assertNull("No onTimeout", oto);
    }

    // onTimeout refers to a default-implementation class
    public void testDI() {
        Element meta = helper.getMetadata("org.apache.felix.ipojo.test.scenarios.component.temporal.TemporalWithDI");
        Element[] provs = meta.getElements("requires", "org.apache.felix.ipojo.handler.temporal");
        assertNotNull("Temporal exists ", provs);
        String field = provs[0].getAttribute("field");
        assertNotNull("Field not null", field);
        assertEquals("Field is fs", "fs", field);
        String oto = provs[0].getAttribute("onTimeout");
        assertEquals("onTimeout is the DI", "org.apache.felix.ipojo.test.scenarios.component.ProvidesSimple", oto);
    }

    // onTimeout policy "empty-array"
    public void testEmptyArray() {
        Element meta = helper.getMetadata("org.apache.felix.ipojo.test.scenarios.component.temporal.TemporalWithEmptyArray");
        Element[] provs = meta.getElements("requires", "org.apache.felix.ipojo.handler.temporal");
        assertNotNull("Temporal exists ", provs);
        String field = provs[0].getAttribute("field");
        assertNotNull("Field not null", field);
        assertEquals("Field is fs", "fs", field);
        String oto = provs[0].getAttribute("onTimeout");
        assertEquals("onTimeout is empty-array", "empty-array", oto);
    }

    // onTimeout policy "null"
    public void testNull() {
        Element meta = helper.getMetadata("org.apache.felix.ipojo.test.scenarios.component.temporal.TemporalWithNull");
        Element[] provs = meta.getElements("requires", "org.apache.felix.ipojo.handler.temporal");
        assertNotNull("Temporal exists ", provs);
        String field = provs[0].getAttribute("field");
        assertNotNull("Field not null", field);
        assertEquals("Field is fs", "fs", field);
        String oto = provs[0].getAttribute("onTimeout");
        assertEquals("onTimeout is null", "null", oto);
    }

    // onTimeout policy "nullable"
    public void testNullable() {
        Element meta = helper.getMetadata("org.apache.felix.ipojo.test.scenarios.component.temporal.TemporalWithNullable");
        Element[] provs = meta.getElements("requires", "org.apache.felix.ipojo.handler.temporal");
        assertNotNull("Temporal exists ", provs);
        String field = provs[0].getAttribute("field");
        assertNotNull("Field not null", field);
        assertEquals("Field is fs", "fs", field);
        String oto = provs[0].getAttribute("onTimeout");
        assertEquals("onTimeout is nullable", "nullable", oto);
    }

    // LDAP filter attribute is propagated
    public void testFilter() {
        Element meta = helper.getMetadata("org.apache.felix.ipojo.test.scenarios.component.temporal.TemporalWithFilter");
        Element[] provs = meta.getElements("requires", "org.apache.felix.ipojo.handler.temporal");
        assertNotNull("Temporal exists ", provs);
        String field = provs[0].getAttribute("field");
        assertNotNull("Field not null", field);
        assertEquals("Field is fs", "fs", field);
        String filter = provs[0].getAttribute("filter");
        assertEquals("Filter", "(vendor=clement)", filter);
    }

    // explicit timeout attribute is propagated
    public void testTimeout() {
        Element meta = helper.getMetadata("org.apache.felix.ipojo.test.scenarios.component.temporal.TemporalWithTimeout");
        Element[] provs = meta.getElements("requires", "org.apache.felix.ipojo.handler.temporal");
        assertNotNull("Temporal exists ", provs);
        String field = provs[0].getAttribute("field");
        assertNotNull("Field not null", field);
        assertEquals("Field is fs", "fs", field);
        String to = provs[0].getAttribute("timeout");
        assertEquals("Check timeout", "100", to);
    }

    // collection dependency: specification attribute only
    public void testSimpleCollection() {
        Element meta = helper.getMetadata("org.apache.felix.ipojo.test.scenarios.component.temporal.TemporalCollection");
        Element dep = getElementPerField(meta, "fs1");
        String spec = dep.getAttribute("specification");
        assertNotNull("Specification not null", spec);
        assertEquals("Check specification", "org.apache.felix.ipojo.test.scenarios.annotations.service.FooService", spec);
    }

    // collection dependency with a timeout
    public void testCollectionWithTimeout() {
        Element meta = helper.getMetadata("org.apache.felix.ipojo.test.scenarios.component.temporal.TemporalCollection");
        Element dep = getElementPerField(meta, "fs2");
        String spec = dep.getAttribute("specification");
        assertNotNull("Specification not null", spec);
        assertEquals("Check specification", "org.apache.felix.ipojo.test.scenarios.annotations.service.FooService", spec);
        String to = dep.getAttribute("timeout");
        assertEquals("Check timeout", "300", to);
    }

    // collection dependency with an ontimeout policy
    public void testCollectionWithPolicy() {
        Element meta = helper.getMetadata("org.apache.felix.ipojo.test.scenarios.component.temporal.TemporalCollection");
        Element dep = getElementPerField(meta, "fs3");
        String spec = dep.getAttribute("specification");
        assertNotNull("Specification not null", spec);
        assertEquals("Check specification", "org.apache.felix.ipojo.test.scenarios.annotations.service.FooService", spec);
        String to = dep.getAttribute("ontimeout");
        assertEquals("Check policy", "empty", to);
    }

    // collection dependency with proxy enabled
    public void testCollectionWithProxy() {
        Element meta = helper.getMetadata("org.apache.felix.ipojo.test.scenarios.component.temporal.TemporalCollection");
        Element dep = getElementPerField(meta, "fs4");
        String spec = dep.getAttribute("specification");
        assertNotNull("Specification not null", spec);
        assertEquals("Check specification", "org.apache.felix.ipojo.test.scenarios.annotations.service.FooService", spec);
        String proxy = dep.getAttribute("proxy");
        assertEquals("Check proxy", "true", proxy);
    }

    // Returns the temporal "requires" element whose "field" attribute matches,
    // or null if no such element exists (callers assert via getAttribute NPE/failure).
    private Element getElementPerField(Element elem, String field) {
        Element[] provs = elem.getElements("requires", "org.apache.felix.ipojo.handler.temporal");
        assertNotNull("Temporal exists ", provs);
        for (int i = 0; i < provs.length; i++) {
            if (provs[i].getAttribute("field").equals(field)) {
                return provs[i];
            }
        }
        return null;
    }

}
apache-2.0
kryptnostic/kodex
src/main/java/com/kryptnostic/kodex/v1/marshalling/DeflatingJacksonMarshaller.java
3062
package com.kryptnostic.kodex.v1.marshalling;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.zip.DataFormatException;
import java.util.zip.Deflater;
import java.util.zip.Inflater;

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Preconditions;
import com.kryptnostic.kodex.v1.serialization.jackson.KodexObjectMapperFactory;

/**
 * Marshals objects to and from DEFLATE-compressed JSON.
 *
 * Wire format: a 4-byte big-endian int holding the uncompressed JSON length,
 * followed by the deflated JSON bytes.
 */
public class DeflatingJacksonMarshaller {
    private final ObjectMapper mapper = KodexObjectMapperFactory.getObjectMapper();
    protected static final int INTEGER_BYTES = Integer.SIZE / Byte.SIZE;

    /**
     * Decompresses and deserializes a value.
     *
     * @param bytes length-prefixed compressed payload produced by {@link #toBytes(Object)}
     * @param reference target class
     * @throws IOException on malformed compressed data or JSON mapping failure
     */
    public <T> T fromBytes( byte[] bytes, Class<T> reference ) throws IOException {
        return mapper.readValue( inflate( bytes ), reference );
    }

    /**
     * Decompresses and deserializes a value via a Jackson {@link TypeReference}
     * (for generic types).
     *
     * @param bytes length-prefixed compressed payload produced by {@link #toBytes(Object)}
     * @param reference target type reference
     * @throws IOException on malformed compressed data or JSON mapping failure
     */
    public <T> T fromBytes( byte[] bytes, TypeReference<T> reference ) throws IOException {
        return mapper.readValue( inflate( bytes ), reference );
    }

    /**
     * Serializes a value to JSON and compresses it.
     *
     * @return 4-byte uncompressed length followed by the deflated JSON
     * @throws IOException on JSON serialization failure
     */
    public byte[] toBytes( Object object ) throws IOException {
        final Deflater deflater = new Deflater( Deflater.BEST_COMPRESSION );
        byte[] input = mapper.writeValueAsBytes( object );
        deflater.setInput( input );
        deflater.finish();

        // BUG FIX: the previous single deflate() call into a fixed (input.length << 1)
        // buffer could silently truncate the compressed stream for tiny or
        // incompressible payloads; loop until the deflater reports completion.
        ByteArrayOutputStream compressed = new ByteArrayOutputStream( Math.max( 64, input.length ) );
        byte[] buffer = new byte[ 8192 ];
        try {
            while ( !deflater.finished() ) {
                int written = deflater.deflate( buffer );
                compressed.write( buffer, 0, written );
            }
        } finally {
            deflater.end(); // release native zlib memory
        }

        byte[] output = compressed.toByteArray();
        ByteBuffer o = ByteBuffer.allocate( INTEGER_BYTES + output.length );
        o.putInt( input.length );
        o.put( output );
        return o.array();
    }

    /**
     * Shared decompression for both fromBytes overloads (previously duplicated verbatim).
     * Reads the 4-byte uncompressed-length prefix, inflates the remainder, and checks
     * that the inflated size matches the prefix.
     */
    private static byte[] inflate( byte[] bytes ) throws IOException {
        ByteBuffer in = ByteBuffer.wrap( bytes );
        int uncompressedLength = in.getInt();
        byte[] compressedBytes = new byte[ bytes.length - INTEGER_BYTES ];
        in.get( compressedBytes );

        final Inflater inflater = new Inflater();
        inflater.setInput( compressedBytes );
        byte[] uncompressedBytes = new byte[ uncompressedLength ];
        int resultingLength;
        try {
            resultingLength = inflater.inflate( uncompressedBytes );
        } catch ( DataFormatException e ) {
            throw new IOException( e );
        } finally {
            inflater.end(); // release native zlib memory
        }
        Preconditions.checkState(
                resultingLength == uncompressedLength,
                "Expected length and decompressed length do not match." );
        return uncompressedBytes;
    }
}
apache-2.0
motorina0/flowable-engine
modules/flowable-dmn-rest/src/main/java/org/flowable/rest/dmn/service/api/repository/BaseDmnDeploymentResourceDataResource.java
2798
/* Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.flowable.rest.dmn.service.api.repository;

import java.io.InputStream;
import java.util.List;

import javax.servlet.http.HttpServletResponse;

import org.apache.commons.io.IOUtils;
import org.flowable.dmn.api.DmnDeployment;
import org.flowable.dmn.api.DmnRepositoryService;
import org.flowable.engine.common.api.FlowableException;
import org.flowable.engine.common.api.FlowableIllegalArgumentException;
import org.flowable.engine.common.api.FlowableObjectNotFoundException;
import org.flowable.rest.application.ContentTypeResolver;
import org.springframework.beans.factory.annotation.Autowired;

/**
 * Shared logic for REST resources that return the raw bytes of a resource
 * inside a DMN deployment.
 *
 * @author Yvo Swillens
 */
public class BaseDmnDeploymentResourceDataResource {

    @Autowired
    protected ContentTypeResolver contentTypeResolver;

    @Autowired
    protected DmnRepositoryService dmnRepositoryService;

    /**
     * Looks up a resource inside a deployment, sets the HTTP content type on the
     * response, and returns the resource bytes.
     *
     * @param deploymentId id of the deployment; must not be null
     * @param resourceId name/id of the resource within the deployment; must not be null
     * @param response servlet response on which the resolved content type is set
     * @return the raw resource bytes
     * @throws FlowableIllegalArgumentException if either id is null
     * @throws FlowableObjectNotFoundException if the deployment or resource does not exist
     * @throws FlowableException if the resource stream cannot be read
     */
    protected byte[] getDmnDeploymentResourceData(String deploymentId, String resourceId, HttpServletResponse response) {

        if (deploymentId == null) {
            throw new FlowableIllegalArgumentException("No deployment id provided");
        }
        if (resourceId == null) {
            throw new FlowableIllegalArgumentException("No resource id provided");
        }

        // Check if deployment exists
        DmnDeployment deployment = dmnRepositoryService.createDeploymentQuery().deploymentId(deploymentId).singleResult();
        if (deployment == null) {
            // BUG FIX: message previously ended with an unbalanced quote
            throw new FlowableObjectNotFoundException("Could not find a DMN deployment with id '" + deploymentId + "'");
        }

        List<String> resourceList = dmnRepositoryService.getDeploymentResourceNames(deploymentId);
        if (!resourceList.contains(resourceId)) {
            // Resource not found in deployment (BUG FIX: closing quote added to message)
            throw new FlowableObjectNotFoundException("Could not find a resource with id '" + resourceId + "' in deployment '" + deploymentId + "'");
        }

        String contentType = contentTypeResolver.resolveContentType(resourceId);
        response.setContentType(contentType);

        // BUG FIX: the stream was previously never closed; try-with-resources closes it.
        try (InputStream resourceStream = dmnRepositoryService.getResourceAsStream(deploymentId, resourceId)) {
            return IOUtils.toByteArray(resourceStream);
        } catch (Exception e) {
            throw new FlowableException("Error converting resource stream", e);
        }
    }
}
apache-2.0
bbranquinho/easy-buying
src/main/java/org/wpattern/easy/buying/permission/PermissionResource.java
428
package org.wpattern.easy.buying.permission;

import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import org.wpattern.easy.buying.utils.GenericService;
import org.wpattern.easy.buying.utils.ResourcePaths;

/**
 * REST resource for {@code Permission} entities, mounted at
 * {@link ResourcePaths#PERMISSION_PATH}.
 *
 * <p>All endpoint behavior is inherited from {@link GenericService}; this class
 * only binds the generic service to the {@code Permission} entity type with a
 * {@code Long} identifier. It intentionally declares no members of its own.
 */
@RestController
@RequestMapping(path = ResourcePaths.PERMISSION_PATH)
public class PermissionResource extends GenericService<Permission, Long> {

}
apache-2.0
s-webber/projog
src/main/java/org/projog/core/predicate/builtin/list/KeySort.java
3947
/*
 * Copyright 2013 S. Webber
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.projog.core.predicate.builtin.list;

import static java.util.Collections.sort;
import static org.projog.core.term.ListFactory.createList;
import static org.projog.core.term.ListUtils.toJavaUtilList;
import static org.projog.core.term.TermComparator.TERM_COMPARATOR;

import java.util.Comparator;
import java.util.List;

import org.projog.core.ProjogException;
import org.projog.core.predicate.AbstractSingleResultPredicate;
import org.projog.core.term.Term;
import org.projog.core.term.TermType;

/* TEST
%?- keysort([a - 1,b - 3,c - 2], X)
% X=[a - 1,b - 3,c - 2]

%?- keysort([c - 2,a - 1,b - 3], X)
% X=[a - 1,b - 3,c - 2]

%TRUE keysort([c - 2,a - 1,b - 3], [a - 1,b - 3,c - 2])

%FAIL keysort([c - 2,a - 1,b - 3], [c - 2,a - 1,b - 3])

% Duplicates are <i>not</i> removed.
%?- keysort([a - 1,a - 9,a - 1,z - 1, q - 3, z - 1], X)
% X=[a - 1,a - 9,a - 1,q - 3,z - 1,z - 1]

% Keys are sorted using the standard ordering of terms.
%?- keysort([Variable - v,1.0 - v,1 - v,atom - v, [] - v,structure(a) - v,[list] - v], X)
% X=[Variable - v,1.0 - v,1 - v,[] - v,atom - v,structure(a) - v,[list] - v]
% Variable=UNINSTANTIATED VARIABLE

%?- keysort([[list] - v,structure(a) - v,[] - v,atom - v,1 - v,1.0 - v,Variable - v], X)
% X=[Variable - v,1.0 - v,1 - v,[] - v,atom - v,structure(a) - v,[list] - v]
% Variable=UNINSTANTIATED VARIABLE

% Both the first and second arguments can contain variables.
%?- keysort([c - Q,a - W,b - E],[R - 1,T - 2,Y - 3])
% Q=3
% W=1
% E=2
% R=a
% T=b
% Y=c
*/
/**
 * <code>keysort(X,Y)</code> - sorts a list of key/value pairs.
 * <p>
 * Sorts the list <code>X</code>, containing <i>key/value pairs</i>, and attempts to unify the result with
 * <code>Y</code>. Key/value pairs are compound terms with a functor of <code>-</code> and two arguments. The first
 * argument is the <i>key</i> and the second argument is the <i>value</i>. It is the key of the key/value pairs that is
 * used to sort the elements contained in <code>X</code>. (Note: duplicates are <i>not</i> removed.)
 */
public final class KeySort extends AbstractSingleResultPredicate {
   /** Functor that marks a compound term as a key/value pair. */
   private static final String KEY_VALUE_PAIR_FUNCTOR = "-";
   /** Orders pairs by comparing only their first argument (the key). */
   private static final Comparator<Term> KEY_VALUE_PAIR_COMPARATOR = new Comparator<Term>() {
      @Override
      public int compare(Term kvp1, Term kvp2) {
         return TERM_COMPARATOR.compare(kvp1.getArgument(0), kvp2.getArgument(0));
      }
   };

   /**
    * Sorts {@code original} by pair key and unifies the sorted list with {@code result}.
    *
    * @throws ProjogException if {@code original} is not a fully instantiated list of key/value pairs
    */
   @Override
   protected boolean evaluate(final Term original, final Term result) {
      final List<Term> elements = toJavaUtilList(original);
      if (elements == null) {
         // BUG FIX: corrected typo "instantied" in the error message
         throw new ProjogException("Expected first argument to be a fully instantiated list but got: " + original);
      }
      assertKeyValuePairs(elements);
      // Collections.sort is stable, so duplicate keys keep their original relative order.
      sort(elements, KEY_VALUE_PAIR_COMPARATOR);
      return result.unify(createList(elements));
   }

   /** Verifies every element is a key/value pair, throwing otherwise. */
   private boolean assertKeyValuePairs(List<Term> elements) {
      for (Term t : elements) {
         if (!assertKeyValuePair(t)) {
            throw new ProjogException("Expected every element of list to be a compound term with a functor of - and two arguments but got: " + t);
         }
      }
      return true;
   }

   /** Returns true if {@code t} is a two-argument structure with functor {@code -}. */
   private boolean assertKeyValuePair(Term t) {
      return t.getType() == TermType.STRUCTURE && KEY_VALUE_PAIR_FUNCTOR.equals(t.getName()) && t.getNumberOfArguments() == 2;
   }
}
apache-2.0
jinahya/jinahya-se
src/main/java/com/github/jinahya/xml/bind/MapValuesAdapter.java
2454
/*
 * Copyright 2011 Jin Kwon <jinahya at gmail.com>.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.github.jinahya.xml.bind;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import javax.xml.bind.annotation.adapters.XmlAdapter;

/**
 * An XmlAdapter for {@link MapValues} and {@link Map}.
 *
 * @author Jin Kwon <jinahya at gmail.com>
 * @param <T> MapValues type parameter
 * @param <K> Key type parameter
 * @param <V> Value type parameter
 */
public abstract class MapValuesAdapter<T extends MapValues<V>, K, V>
        extends XmlAdapter<T, Map<K, V>> {

    /**
     * Creates a new instance.
     *
     * @param mapValuesType MapValues type; must not be {@code null}
     */
    public MapValuesAdapter(final Class<T> mapValuesType) {
        super();

        if (mapValuesType == null) {
            throw new NullPointerException("mapValuesType");
        }

        this.mapValuesType = mapValuesType;
    }

    /**
     * Converts a bound {@code MapValues} instance back into a {@code Map},
     * keyed by {@link #getKey(Object)}.
     *
     * @param v the bound value, possibly {@code null}
     * @return a new map, or {@code null} when {@code v} is {@code null}
     */
    @Override
    public Map<K, V> unmarshal(final T v) throws Exception {

        if (v == null) {
            return null;
        }

        // NOTE(review): assumes getValues() never returns null for a non-null
        // MapValues instance - confirm against the MapValues contract.
        final Map<K, V> b = new HashMap<K, V>(v.getValues().size());

        for (V value : v.getValues()) {
            b.put(getKey(value), value);
        }

        return b;
    }

    /**
     * Returns the map key for specified map value.
     *
     * @param value the map value
     *
     * @return map key for specified {@code value}.
     */
    protected abstract K getKey(V value);

    /**
     * Converts a {@code Map} into a bound {@code MapValues} instance holding
     * the map's values.
     *
     * @param b the map, possibly {@code null}
     * @return a new bound instance, or {@code null} when {@code b} is {@code null}
     */
    @Override
    public T marshal(final Map<K, V> b) throws Exception {

        if (b == null) {
            return null;
        }

        final T v = mapValuesType.newInstance();

        final List<V> values = v.getValues();
        // BUG FIX: the original cast the list to ArrayList unconditionally,
        // which throws ClassCastException whenever getValues() is backed by
        // any other List implementation. Presizing is only a hint, so apply
        // it opportunistically.
        if (values instanceof ArrayList) {
            ((ArrayList<V>) values).ensureCapacity(values.size() + b.size());
        }
        values.addAll(b.values());

        return v;
    }

    /**
     * map values type.
     */
    private final Class<T> mapValuesType;
}
apache-2.0
glustful/gelanghe
Mh/src/com/maybe/mh/upload/CustomMultipartEntity.java
1677
package com.maybe.mh.upload;

import java.io.FilterOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.Charset;

import org.apache.http.entity.mime.HttpMultipartMode;
import org.apache.http.entity.mime.MultipartEntity;

/**
 * A {@link MultipartEntity} that reports write progress to a
 * {@link ProgressListener} while the entity body is being streamed out.
 */
public class CustomMultipartEntity extends MultipartEntity {

    /** Callback notified with the cumulative byte count on every write. */
    private final ProgressListener listener;

    public CustomMultipartEntity(final ProgressListener listener) {
        super();
        this.listener = listener;
    }

    public CustomMultipartEntity(final HttpMultipartMode mode, final ProgressListener listener) {
        super(mode);
        this.listener = listener;
    }

    public CustomMultipartEntity(HttpMultipartMode mode, final String boundary, final Charset charset,
            final ProgressListener listener) {
        super(mode, boundary, charset);
        this.listener = listener;
    }

    @Override
    public void writeTo(OutputStream outstream) throws IOException {
        // Wrap the destination so each byte written is counted and reported.
        super.writeTo(new CountingOutputStream(outstream, listener));
    }

    /** Observer of transfer progress. */
    public static interface ProgressListener {

        /**
         * Called after each write with the running total of bytes transferred.
         *
         * @param num cumulative number of bytes written so far
         */
        void transferred(long num);
    }

    /**
     * {@link FilterOutputStream} decorator that tallies the bytes passing
     * through it and pushes the running total to a {@link ProgressListener}.
     */
    public static class CountingOutputStream extends FilterOutputStream {

        private final ProgressListener progressListener;
        private long total;

        public CountingOutputStream(final OutputStream out, final ProgressListener listener) {
            super(out);
            progressListener = listener;
            total = 0L;
        }

        public void write(byte[] b, int off, int len) throws IOException {
            // Delegate directly to the wrapped stream (FilterOutputStream's
            // default would write byte-by-byte), then report progress.
            out.write(b, off, len);
            total += len;
            progressListener.transferred(total);
        }

        public void write(int b) throws IOException {
            out.write(b);
            total++;
            progressListener.transferred(total);
        }
    }
}
apache-2.0
jtheulier/bouquet-java-sdk
src/main/java/io/bouquet/v4/Configuration.java
1449
/*******************************************************************************
 * Copyright 2017 julien@squidsolutions.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *******************************************************************************/
package io.bouquet.v4;

/**
 * Holds the process-wide default {@link ApiClient} that API instances fall
 * back on when they are created without an explicit client.
 */
public class Configuration {

    // Shared fallback client; replaced wholesale via setDefaultApiClient.
    private static ApiClient sharedClient = new ApiClient();

    /**
     * Get the default API client, which would be used when creating API
     * instances without providing an API client.
     *
     * @return Default API client
     */
    public static ApiClient getDefaultApiClient() {
        return sharedClient;
    }

    /**
     * Set the default API client, which would be used when creating API
     * instances without providing an API client.
     *
     * @param apiClient API client
     */
    public static void setDefaultApiClient(ApiClient apiClient) {
        sharedClient = apiClient;
    }
}
apache-2.0
anuragagrawal1/simple-mvn-project
simple-maven-common/src/main/java/com/evolutionnext/model/Album.java
722
package com.evolutionnext.model;

import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;

/**
 * JPA entity representing an album. Uses property-based access: the
 * {@code @Id} annotation sits on {@link #getId()}.
 */
@Entity
public class Album {

    // Surrogate primary key, generated by the provider.
    private Long id;

    // Album title.
    private String name;

    // NOTE(review): no getter/setter exists for this field; with property-based
    // access it is not mapped and appears unused - confirm before removing.
    private String artistName;

    public Album() {
    }

    public Album(String s) {
        this.name = s;
    }

    @Id
    @GeneratedValue(strategy = GenerationType.AUTO)
    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }
}
apache-2.0
ecki/commons-vfs
commons-vfs2/src/test/java/org/apache/commons/vfs2/provider/test/GenericFileNameTestCase.java
6518
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.commons.vfs2.provider.test;

import org.apache.commons.vfs2.AbstractVfsTestCase;
import org.apache.commons.vfs2.FileSystemException;
import org.apache.commons.vfs2.provider.GenericFileName;
import org.apache.commons.vfs2.provider.URLFileNameParser;
import org.junit.Test;

/**
 * Some GenericFileName test cases.
 */
public class GenericFileNameTestCase extends AbstractVfsTestCase {

    /**
     * Tests error handling in URI parser.
     *
     * <p>Each case pairs a malformed "ftp" URI with the resource key of the
     * error message the parser is expected to raise.
     */
    @Test
    public void testBadlyFormedUri() throws Exception {
        // Does not start with ftp://
        testBadlyFormedUri("ftp:", "vfs.provider/missing-double-slashes.error");
        testBadlyFormedUri("ftp:/", "vfs.provider/missing-double-slashes.error");
        testBadlyFormedUri("ftp:a", "vfs.provider/missing-double-slashes.error");

        // Missing hostname
        testBadlyFormedUri("ftp://", "vfs.provider/missing-hostname.error");
        testBadlyFormedUri("ftp://:21/file", "vfs.provider/missing-hostname.error");
        testBadlyFormedUri("ftp:///file", "vfs.provider/missing-hostname.error");

        // Empty port
        testBadlyFormedUri("ftp://host:", "vfs.provider/missing-port.error");
        testBadlyFormedUri("ftp://host:/file", "vfs.provider/missing-port.error");
        testBadlyFormedUri("ftp://host:port/file", "vfs.provider/missing-port.error");

        // Missing absolute path
        testBadlyFormedUri("ftp://host:90a", "vfs.provider/missing-hostname-path-sep.error");
        testBadlyFormedUri("ftp://host?a", "vfs.provider/missing-hostname-path-sep.error");
    }

    /**
     * Tests that parsing a URI fails with the expected error.
     */
    private void testBadlyFormedUri(final String uri, final String errorMsg) {
        try {
            // Parser is constructed with default port 80; parsing must fail.
            new URLFileNameParser(80).parseUri(null, null, uri);
            fail();
        } catch (final FileSystemException e) {
            // Verify the failure carries the expected error-message key.
            assertSameMessage(errorMsg, uri, e);
        }
    }

    /**
     * Tests parsing a URI into its parts.
     *
     * <p>Covers: plain names, explicit ports, missing paths, userinfo with and
     * without a password, and percent-encoded userinfo characters. The parser's
     * default port is 21.
     */
    @Test
    public void testParseUri() throws Exception {
        final URLFileNameParser urlParser = new URLFileNameParser(21);

        // Simple name
        GenericFileName name = (GenericFileName) urlParser.parseUri(null, null, "ftp://hostname/file");
        assertEquals("ftp", name.getScheme());
        assertNull(name.getUserName());
        assertNull(name.getPassword());
        assertEquals("hostname", name.getHostName());
        assertEquals(21, name.getPort());
        assertEquals(name.getDefaultPort(), name.getPort());
        assertEquals("/file", name.getPath());
        assertEquals("ftp://hostname/", name.getRootURI());
        assertEquals("ftp://hostname/file", name.getURI());

        // Name with port
        name = (GenericFileName) urlParser.parseUri(null, null, "ftp://hostname:9090/file");
        assertEquals("ftp", name.getScheme());
        assertNull(name.getUserName());
        assertNull(name.getPassword());
        assertEquals("hostname", name.getHostName());
        assertEquals(9090, name.getPort());
        assertEquals("/file", name.getPath());
        assertEquals("ftp://hostname:9090/", name.getRootURI());
        assertEquals("ftp://hostname:9090/file", name.getURI());

        // Name with no path
        name = (GenericFileName) urlParser.parseUri(null, null, "ftp://hostname");
        assertEquals("ftp", name.getScheme());
        assertNull(name.getUserName());
        assertNull(name.getPassword());
        assertEquals("hostname", name.getHostName());
        assertEquals(21, name.getPort());
        assertEquals("/", name.getPath());
        assertEquals("ftp://hostname/", name.getRootURI());
        assertEquals("ftp://hostname/", name.getURI());

        // Name with username
        name = (GenericFileName) urlParser.parseUri(null, null, "ftp://user@hostname/file");
        assertEquals("ftp", name.getScheme());
        assertEquals("user", name.getUserName());
        assertNull(name.getPassword());
        assertEquals("hostname", name.getHostName());
        assertEquals(21, name.getPort());
        assertEquals("/file", name.getPath());
        assertEquals("ftp://user@hostname/", name.getRootURI());
        assertEquals("ftp://user@hostname/file", name.getURI());

        // Name with username and password
        name = (GenericFileName) urlParser.parseUri(null, null, "ftp://user:password@hostname/file");
        assertEquals("ftp", name.getScheme());
        assertEquals("user", name.getUserName());
        assertEquals("password", name.getPassword());
        assertEquals("hostname", name.getHostName());
        assertEquals(21, name.getPort());
        assertEquals("/file", name.getPath());
        assertEquals("ftp://user:password@hostname/", name.getRootURI());
        assertEquals("ftp://user:password@hostname/file", name.getURI());

        // Encoded username and password: %75 -> 'u', %40 -> '@'
        name = (GenericFileName) urlParser.parseUri(null, null, "ftp://%75ser%3A:%40@hostname");
        assertEquals("ftp", name.getScheme());
        assertEquals("user:", name.getUserName());
        assertEquals("@", name.getPassword());
        assertEquals("hostname", name.getHostName());
        assertEquals(21, name.getPort());
        assertEquals("/", name.getPath());
        // RFC 2396: The ':' character in a userinfo does not have to be escaped/percent-encoded, it is NOT RECOMMENDED for use.
        // RFC 3986: The ':' character in a userinfo is deprecated.
        // See also https://issues.apache.org/jira/browse/VFS-810
        assertEquals("ftp://user::%40@hostname/", name.getRootURI());
        assertEquals("ftp://user::%40@hostname/", name.getURI());
    }
}
apache-2.0
mlaggner/tinyMediaManager
src/org/tinymediamanager/ui/tvshows/dialogs/TvShowEpisodeEditorDialog.java
27331
/* * Copyright 2012 - 2015 Manuel Laggner * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.tinymediamanager.ui.tvshows.dialogs; import java.awt.BorderLayout; import java.awt.Cursor; import java.awt.Dimension; import java.awt.Insets; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.MouseAdapter; import java.awt.event.MouseEvent; import java.io.File; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.Map.Entry; import java.util.ResourceBundle; import java.util.regex.Matcher; import java.util.regex.Pattern; import javax.swing.AbstractAction; import javax.swing.JButton; import javax.swing.JCheckBox; import javax.swing.JComboBox; import javax.swing.JLabel; import javax.swing.JList; import javax.swing.JOptionPane; import javax.swing.JPanel; import javax.swing.JScrollPane; import javax.swing.JSpinner; import javax.swing.JTable; import javax.swing.JTextArea; import javax.swing.JTextField; import javax.swing.SpinnerDateModel; import javax.swing.SpinnerNumberModel; import javax.swing.SwingWorker; import org.apache.commons.lang3.StringUtils; import org.jdesktop.beansbinding.AutoBinding.UpdateStrategy; import org.jdesktop.beansbinding.BeanProperty; import org.jdesktop.observablecollections.ObservableCollections; import org.jdesktop.swingbinding.JListBinding; import org.jdesktop.swingbinding.JTableBinding; import 
org.jdesktop.swingbinding.SwingBindings; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.tinymediamanager.Globals; import org.tinymediamanager.core.entities.MediaFile; import org.tinymediamanager.core.tvshow.TvShowList; import org.tinymediamanager.core.tvshow.TvShowScrapers; import org.tinymediamanager.core.tvshow.entities.TvShowActor; import org.tinymediamanager.core.tvshow.entities.TvShowEpisode; import org.tinymediamanager.scraper.ITvShowMetadataProvider; import org.tinymediamanager.scraper.MediaArtwork; import org.tinymediamanager.scraper.MediaArtwork.MediaArtworkType; import org.tinymediamanager.scraper.MediaEpisode; import org.tinymediamanager.scraper.MediaMetadata; import org.tinymediamanager.scraper.MediaScrapeOptions; import org.tinymediamanager.scraper.MediaType; import org.tinymediamanager.ui.EqualsLayout; import org.tinymediamanager.ui.IconManager; import org.tinymediamanager.ui.TmmUIHelper; import org.tinymediamanager.ui.UTF8Control; import org.tinymediamanager.ui.components.AutocompleteComboBox; import org.tinymediamanager.ui.components.ImageLabel; import org.tinymediamanager.ui.dialogs.TmmDialog; import com.jgoodies.forms.factories.FormFactory; import com.jgoodies.forms.layout.ColumnSpec; import com.jgoodies.forms.layout.FormLayout; import com.jgoodies.forms.layout.RowSpec; /** * The Class TvShowEpisodeScrapeDialog. 
* * @author Manuel Laggner */ public class TvShowEpisodeEditorDialog extends TmmDialog implements ActionListener { private static final long serialVersionUID = 7702248909791283043L; /** * @wbp.nls.resourceBundle messages */ private static final ResourceBundle BUNDLE = ResourceBundle.getBundle("messages", new UTF8Control()); //$NON-NLS-1$ private static final Logger LOGGER = LoggerFactory.getLogger(TvShowChooserDialog.class); private static final Date INITIAL_DATE = new Date(0); private TvShowList tvShowList = TvShowList.getInstance(); private TvShowEpisode episodeToEdit; private List<TvShowActor> cast = ObservableCollections.observableList(new ArrayList<TvShowActor>()); private List<String> tags = ObservableCollections.observableList(new ArrayList<String>()); private boolean continueQueue = true; private JTextField tfTitle; private JLabel lblFilename; private JSpinner spEpisode; private JSpinner spSeason; private JSpinner spRating; private JSpinner spDvdSeason; private JSpinner spDvdEpisode; private JCheckBox cbDvdOrder; private JSpinner spFirstAired; private JSpinner spDateAdded; private JCheckBox chckbxWatched; private ImageLabel lblThumb; private JTextArea taPlot; private JTextField tfDirector; private JTextField tfWriter; private JTable tableGuests; private JComboBox cbTags; private JList listTags; private JTableBinding<TvShowActor, List<TvShowActor>, JTable> jTableBinding; private JListBinding<String, List<String>, JList> jListBinding; /** * Instantiates a new tv show episode scrape dialog. 
* * @param episode * the episode * @param inQueue * the in queue */ public TvShowEpisodeEditorDialog(TvShowEpisode episode, boolean inQueue) { super(BUNDLE.getString("tvshowepisode.scrape"), "tvShowEpisodeScraper"); //$NON-NLS-1$ setBounds(5, 5, 964, 632); this.episodeToEdit = episode; getContentPane().setLayout(new BorderLayout()); { JPanel contentPanel = new JPanel(); getContentPane().add(contentPanel, BorderLayout.CENTER); contentPanel.setLayout(new FormLayout( new ColumnSpec[] { FormFactory.LABEL_COMPONENT_GAP_COLSPEC, FormFactory.DEFAULT_COLSPEC, FormFactory.RELATED_GAP_COLSPEC, ColumnSpec.decode("100px"), FormFactory.RELATED_GAP_COLSPEC, ColumnSpec.decode("default:grow"), FormFactory.RELATED_GAP_COLSPEC, ColumnSpec.decode("120px"), FormFactory.RELATED_GAP_COLSPEC, FormFactory.DEFAULT_COLSPEC, FormFactory.RELATED_GAP_COLSPEC, ColumnSpec.decode("300px:grow"), FormFactory.LABEL_COMPONENT_GAP_COLSPEC, }, new RowSpec[] { FormFactory.LINE_GAP_ROWSPEC, FormFactory.DEFAULT_ROWSPEC, FormFactory.RELATED_GAP_ROWSPEC, FormFactory.DEFAULT_ROWSPEC, FormFactory.RELATED_GAP_ROWSPEC, FormFactory.DEFAULT_ROWSPEC, FormFactory.RELATED_GAP_ROWSPEC, FormFactory.DEFAULT_ROWSPEC, FormFactory.RELATED_GAP_ROWSPEC, FormFactory.DEFAULT_ROWSPEC, FormFactory.RELATED_GAP_ROWSPEC, FormFactory.DEFAULT_ROWSPEC, FormFactory.RELATED_GAP_ROWSPEC, FormFactory.DEFAULT_ROWSPEC, FormFactory.RELATED_GAP_ROWSPEC, RowSpec.decode("default:grow"), FormFactory.RELATED_GAP_ROWSPEC, FormFactory.DEFAULT_ROWSPEC, FormFactory.RELATED_GAP_ROWSPEC, FormFactory.DEFAULT_ROWSPEC, FormFactory.RELATED_GAP_ROWSPEC, FormFactory.DEFAULT_ROWSPEC, FormFactory.RELATED_GAP_ROWSPEC, FormFactory.DEFAULT_ROWSPEC, FormFactory.RELATED_GAP_ROWSPEC, RowSpec.decode("default:grow"), FormFactory.RELATED_GAP_ROWSPEC, FormFactory.DEFAULT_ROWSPEC, FormFactory.RELATED_GAP_ROWSPEC, })); JLabel lblFilenameT = new JLabel(BUNDLE.getString("metatag.path")); //$NON-NLS-1$ contentPanel.add(lblFilenameT, "2, 2, right, default"); lblFilename = 
new JLabel(""); contentPanel.add(lblFilename, "4, 2, 9, 1, left, bottom"); JLabel lblTitle = new JLabel(BUNDLE.getString("metatag.title")); //$NON-NLS-1$ contentPanel.add(lblTitle, "2, 4, right, default"); tfTitle = new JTextField(); contentPanel.add(tfTitle, "4, 4, 9, 1"); tfTitle.setColumns(10); JLabel lblSeason = new JLabel(BUNDLE.getString("metatag.season")); //$NON-NLS-1$ contentPanel.add(lblSeason, "2, 6, right, default"); spSeason = new JSpinner(); contentPanel.add(spSeason, "4, 6"); JLabel lblEpisode = new JLabel(BUNDLE.getString("metatag.episode")); //$NON-NLS-1$ contentPanel.add(lblEpisode, "6, 6, right, default"); spEpisode = new JSpinner(); contentPanel.add(spEpisode, "8, 6"); JLabel lblDvdSeason = new JLabel(BUNDLE.getString("metatag.dvdseason")); //$NON-NLS-1$ contentPanel.add(lblDvdSeason, "2, 8, right, default"); spDvdSeason = new JSpinner(); contentPanel.add(spDvdSeason, "4, 8"); JLabel lblDvdEpisode = new JLabel(BUNDLE.getString("metatag.dvdepisode")); //$NON-NLS-1$ contentPanel.add(lblDvdEpisode, "6, 8, right, default"); spDvdEpisode = new JSpinner(); contentPanel.add(spDvdEpisode, "8, 8"); JLabel lblDvdOrder = new JLabel(BUNDLE.getString("metatag.dvdorder")); //$NON-NLS-1$ contentPanel.add(lblDvdOrder, "2, 10, right, default"); cbDvdOrder = new JCheckBox(""); contentPanel.add(cbDvdOrder, "4, 10"); JLabel lblRating = new JLabel(BUNDLE.getString("metatag.rating")); //$NON-NLS-1$ contentPanel.add(lblRating, "2, 12, right, default"); spRating = new JSpinner(); contentPanel.add(spRating, "4, 12"); JLabel lblFirstAired = new JLabel(BUNDLE.getString("metatag.aired")); //$NON-NLS-1$ contentPanel.add(lblFirstAired, "6, 12, right, default"); spFirstAired = new JSpinner(new SpinnerDateModel()); contentPanel.add(spFirstAired, "8, 12"); JLabel lblWatched = new JLabel(BUNDLE.getString("metatag.watched")); //$NON-NLS-1$ contentPanel.add(lblWatched, "2, 14, right, default"); chckbxWatched = new JCheckBox(""); contentPanel.add(chckbxWatched, "4, 14"); JLabel 
lblDateAdded = new JLabel(BUNDLE.getString("metatag.dateadded")); //$NON-NLS-1$ contentPanel.add(lblDateAdded, "6, 14, right, default"); spDateAdded = new JSpinner(new SpinnerDateModel()); contentPanel.add(spDateAdded, "8, 14"); JLabel lblPlot = new JLabel(BUNDLE.getString("metatag.plot")); //$NON-NLS-1$ contentPanel.add(lblPlot, "2, 16, right, top"); JScrollPane scrollPane = new JScrollPane(); contentPanel.add(scrollPane, "4, 16, 5, 1, fill, fill"); taPlot = new JTextArea(); taPlot.setLineWrap(true); taPlot.setWrapStyleWord(true); scrollPane.setViewportView(taPlot); lblThumb = new ImageLabel(); lblThumb.setAlternativeText(BUNDLE.getString("image.notfound.thumb")); //$NON-NLS-1$ lblThumb.addMouseListener(new MouseAdapter() { @Override public void mouseClicked(MouseEvent e) { File file = TmmUIHelper.selectFile(BUNDLE.getString("image.choose")); //$NON-NLS-1$ if (file != null && file.exists() && file.isFile()) { String fileName = file.getPath(); lblThumb.setImageUrl("file:/" + fileName); } } }); lblThumb.setCursor(Cursor.getPredefinedCursor(Cursor.HAND_CURSOR)); contentPanel.add(lblThumb, "10, 6, 3, 15"); JLabel lblDirector = new JLabel(BUNDLE.getString("metatag.director")); //$NON-NLS-1$ contentPanel.add(lblDirector, "2, 18, right, default"); tfDirector = new JTextField(); tfDirector.setText((String) null); tfDirector.setColumns(10); contentPanel.add(tfDirector, "4, 18, 5, 1, fill, default"); JLabel lblWriter = new JLabel(BUNDLE.getString("metatag.writer")); //$NON-NLS-1$ contentPanel.add(lblWriter, "2, 20, right, default"); tfWriter = new JTextField(); tfWriter.setText((String) null); tfWriter.setColumns(10); contentPanel.add(tfWriter, "4, 20, 5, 1, fill, default"); JLabel lblGuests = new JLabel(BUNDLE.getString("metatag.guests")); //$NON-NLS-1$ contentPanel.add(lblGuests, "2, 22, right, top"); JScrollPane scrollPaneGuests = new JScrollPane(); contentPanel.add(scrollPaneGuests, "4, 22, 5, 7, fill, fill"); tableGuests = new JTable(); 
scrollPaneGuests.setViewportView(tableGuests); JLabel lblTags = new JLabel(BUNDLE.getString("metatag.tags")); //$NON-NLS-1$ contentPanel.add(lblTags, "10, 22, default, top"); JScrollPane scrollPaneTags = new JScrollPane(); contentPanel.add(scrollPaneTags, "12, 22, 1, 5, fill, fill"); listTags = new JList(); scrollPaneTags.setViewportView(listTags); JButton btnAddActor = new JButton(""); btnAddActor.setMargin(new Insets(2, 2, 2, 2)); btnAddActor.setAction(new AddActorAction()); btnAddActor.setIcon(IconManager.LIST_ADD); contentPanel.add(btnAddActor, "2, 24, right, top"); JButton btnAddTag = new JButton(""); btnAddTag.setMargin(new Insets(2, 2, 2, 2)); btnAddTag.setAction(new AddTagAction()); btnAddTag.setIcon(IconManager.LIST_ADD); contentPanel.add(btnAddTag, "10, 24, right, top"); JButton btnRemoveActor = new JButton(""); btnRemoveActor.setMargin(new Insets(2, 2, 2, 2)); btnRemoveActor.setAction(new RemoveActorAction()); btnRemoveActor.setIcon(IconManager.LIST_REMOVE); contentPanel.add(btnRemoveActor, "2, 26, right, top"); JButton btnRemoveTag = new JButton(""); btnRemoveTag.setMargin(new Insets(2, 2, 2, 2)); btnRemoveTag.setAction(new RemoveTagAction()); btnRemoveTag.setIcon(IconManager.LIST_REMOVE); contentPanel.add(btnRemoveTag, "10, 26, right, top"); cbTags = new AutocompleteComboBox(tvShowList.getTagsInEpisodes().toArray()); cbTags.setEditable(true); contentPanel.add(cbTags, "12, 28, fill, default"); } { JPanel bottomPanel = new JPanel(); getContentPane().add(bottomPanel, BorderLayout.SOUTH); bottomPanel.setLayout(new FormLayout( new ColumnSpec[] { FormFactory.LABEL_COMPONENT_GAP_COLSPEC, FormFactory.DEFAULT_COLSPEC, FormFactory.RELATED_GAP_COLSPEC, FormFactory.DEFAULT_COLSPEC, FormFactory.RELATED_GAP_COLSPEC, ColumnSpec.decode("default:grow"), FormFactory.RELATED_GAP_COLSPEC, FormFactory.DEFAULT_COLSPEC, FormFactory.RELATED_GAP_COLSPEC, }, new RowSpec[] { FormFactory.LINE_GAP_ROWSPEC, RowSpec.decode("25px"), FormFactory.RELATED_GAP_ROWSPEC, })); JComboBox 
cbScraper = new JComboBox(TvShowScrapers.values()); cbScraper.setSelectedItem(Globals.settings.getTvShowSettings().getTvShowScraper()); bottomPanel.add(cbScraper, "2, 2, fill, default"); JButton btnScrape = new JButton(BUNDLE.getString("Button.scrape")); //$NON-NLS-1$ btnScrape.setPreferredSize(new Dimension(100, 23)); btnScrape.setMaximumSize(new Dimension(0, 0)); btnScrape.setMinimumSize(new Dimension(100, 23)); btnScrape.setActionCommand("Scrape"); btnScrape.addActionListener(this); bottomPanel.add(btnScrape, "4, 2, left, fill"); JButton btnSearch = new JButton(BUNDLE.getString("tvshowepisodechooser.search")); //$NON-NLS-1$ btnSearch.setActionCommand("Search"); btnSearch.addActionListener(this); btnSearch.setIcon(IconManager.SEARCH); bottomPanel.add(btnSearch, "6, 2, left, fill"); { JPanel buttonPane = new JPanel(); bottomPanel.add(buttonPane, "8, 2, fill, fill"); EqualsLayout layout = new EqualsLayout(5); layout.setMinWidth(100); buttonPane.setLayout(layout); JButton okButton = new JButton(BUNDLE.getString("Button.ok")); //$NON-NLS-1$ okButton.setToolTipText(BUNDLE.getString("tvshow.change")); okButton.setIcon(IconManager.APPLY); buttonPane.add(okButton); okButton.setActionCommand("OK"); okButton.addActionListener(this); JButton cancelButton = new JButton(BUNDLE.getString("Button.cancel")); //$NON-NLS-1$ cancelButton.setToolTipText(BUNDLE.getString("edit.discard")); cancelButton.setIcon(IconManager.CANCEL); buttonPane.add(cancelButton); cancelButton.setActionCommand("Cancel"); cancelButton.addActionListener(this); if (inQueue) { JButton abortButton = new JButton(BUNDLE.getString("Button.abortqueue")); //$NON-NLS-1$ abortButton.setToolTipText(BUNDLE.getString("tvshow.edit.abortqueue.desc")); //$NON-NLS-1$ abortButton.setIcon(IconManager.PROCESS_STOP); buttonPane.add(abortButton); abortButton.setActionCommand("Abort"); abortButton.addActionListener(this); } } } initDataBindings(); // fill data { MediaFile mediaFile = episodeToEdit.getMediaFiles().get(0); 
lblFilename.setText(mediaFile.getPath() + File.separator + mediaFile.getFilename()); tfTitle.setText(episodeToEdit.getTitle()); spSeason.setModel(new SpinnerNumberModel(episodeToEdit.getAiredSeason(), -1, Integer.MAX_VALUE, 1)); spEpisode.setModel(new SpinnerNumberModel(episodeToEdit.getAiredEpisode(), -1, Integer.MAX_VALUE, 1)); spDvdSeason.setModel(new SpinnerNumberModel(episodeToEdit.getDvdSeason(), -1, Integer.MAX_VALUE, 1)); spDvdEpisode.setModel(new SpinnerNumberModel(episodeToEdit.getDvdEpisode(), -1, Integer.MAX_VALUE, 1)); cbDvdOrder.setSelected(episodeToEdit.isDvdOrder()); SimpleDateFormat dateFormat = (SimpleDateFormat) DateFormat.getDateInstance(DateFormat.MEDIUM); // spDateAdded.setEditor(new JSpinner.DateEditor(spDateAdded, dateFormat.toPattern())); spFirstAired.setEditor(new JSpinner.DateEditor(spFirstAired, dateFormat.toPattern())); spDateAdded.setValue(episodeToEdit.getDateAdded()); if (episodeToEdit.getFirstAired() != null) { spFirstAired.setValue(episodeToEdit.getFirstAired()); } else { spFirstAired.setValue(INITIAL_DATE); } lblThumb.setImagePath(episodeToEdit.getThumb()); spRating.setModel(new SpinnerNumberModel(episodeToEdit.getRating(), 0.0, 10.0, 0.1)); chckbxWatched.setSelected(episodeToEdit.isWatched()); taPlot.setText(episodeToEdit.getPlot()); taPlot.setCaretPosition(0); tfDirector.setText(episodeToEdit.getDirector()); tfWriter.setText(episodeToEdit.getWriter()); for (TvShowActor origCast : episodeToEdit.getGuests()) { TvShowActor actor = new TvShowActor(); actor.setName(origCast.getName()); actor.setCharacter(origCast.getCharacter()); actor.setThumb(origCast.getThumb()); cast.add(actor); } for (String tag : episodeToEdit.getTags()) { tags.add(tag); } } // adjust table columns tableGuests.getColumnModel().getColumn(0).setHeaderValue(BUNDLE.getString("metatag.name")); //$NON-NLS-1$ tableGuests.getColumnModel().getColumn(1).setHeaderValue(BUNDLE.getString("metatag.role")); //$NON-NLS-1$ } /** * Shows the dialog and returns whether the work 
on the queue should be continued. * * @return true, if successful */ public boolean showDialog() { setVisible(true); return continueQueue; } @Override public void actionPerformed(ActionEvent e) { // assign scraped data if ("OK".equals(e.getActionCommand())) { episodeToEdit.setTitle(tfTitle.getText()); episodeToEdit.setDvdOrder(cbDvdOrder.isSelected()); episodeToEdit.setAiredSeason((Integer) spSeason.getValue()); episodeToEdit.setAiredEpisode((Integer) spEpisode.getValue()); episodeToEdit.setDvdSeason((Integer) spDvdSeason.getValue()); episodeToEdit.setDvdEpisode((Integer) spDvdEpisode.getValue()); episodeToEdit.setPlot(taPlot.getText()); double tempRating = (Double) spRating.getValue(); float rating = (float) tempRating; if (episodeToEdit.getRating() != rating) { episodeToEdit.setRating(rating); episodeToEdit.setVotes(1); } episodeToEdit.setDateAdded((Date) spDateAdded.getValue()); Date firstAiredDate = (Date) spFirstAired.getValue(); if (!firstAiredDate.equals(INITIAL_DATE)) { episodeToEdit.setFirstAired(firstAiredDate); } episodeToEdit.setWatched(chckbxWatched.isSelected()); episodeToEdit.setDirector(tfDirector.getText()); episodeToEdit.setWriter(tfWriter.getText()); episodeToEdit.setActors(cast); if (StringUtils.isNotEmpty(lblThumb.getImageUrl()) && (!lblThumb.getImageUrl().equals(episodeToEdit.getThumbUrl()) || StringUtils.isBlank(episodeToEdit.getThumb()))) { episodeToEdit.setThumbUrl(lblThumb.getImageUrl()); episodeToEdit.writeThumbImage(); } episodeToEdit.setTags(tags); episodeToEdit.saveToDb(); episodeToEdit.writeNFO(); setVisible(false); } // cancel if ("Cancel".equals(e.getActionCommand())) { setVisible(false); } // Abort queue if ("Abort".equals(e.getActionCommand())) { continueQueue = false; setVisible(false); } // scrape if ("Scrape".equals(e.getActionCommand())) { ScrapeTask task = new ScrapeTask(TvShowList.getInstance().getMetadataProvider()); task.execute(); } // search if ("Search".equals(e.getActionCommand())) { TvShowEpisodeChooserDialog dialog = 
new TvShowEpisodeChooserDialog(episodeToEdit, TvShowList.getInstance().getMetadataProvider()); dialog.setLocationRelativeTo(this); dialog.setVisible(true); MediaEpisode metadata = dialog.getMetadata(); if (metadata != null && StringUtils.isNotBlank(metadata.title)) { tfTitle.setText(metadata.title); taPlot.setText(metadata.plot); spEpisode.setValue(metadata.episode); spSeason.setValue(metadata.season); for (MediaArtwork ma : metadata.artwork) { if (ma.getType() == MediaArtworkType.THUMB) { lblThumb.setImageUrl(ma.getDefaultUrl()); break; } } } } } private class ScrapeTask extends SwingWorker<Void, Void> { ITvShowMetadataProvider mp; public ScrapeTask(ITvShowMetadataProvider mp) { this.mp = mp; } @Override protected Void doInBackground() throws Exception { MediaScrapeOptions options = new MediaScrapeOptions(); options.setLanguage(Globals.settings.getTvShowSettings().getScraperLanguage()); options.setCountry(Globals.settings.getTvShowSettings().getCertificationCountry()); for (Entry<String, Object> entry : episodeToEdit.getTvShow().getIds().entrySet()) { options.setId(entry.getKey(), entry.getValue().toString()); } options.setType(MediaType.TV_EPISODE); options.setId(MediaMetadata.SEASON_NR, spSeason.getValue().toString()); options.setId(MediaMetadata.EPISODE_NR, spEpisode.getValue().toString()); try { MediaMetadata metadata = mp.getEpisodeMetadata(options); // if nothing has been found -> open the search box if (metadata == null || StringUtils.isBlank(metadata.getStringValue(MediaMetadata.TITLE))) { // message JOptionPane.showMessageDialog(TvShowEpisodeEditorDialog.this, BUNDLE.getString("message.scrape.tvshowepisodefailed")); //$NON-NLS-1$ } else { tfTitle.setText(metadata.getStringValue(MediaMetadata.TITLE)); taPlot.setText(metadata.getStringValue(MediaMetadata.PLOT)); spFirstAired.setValue(parseFirstAired(metadata.getStringValue(MediaMetadata.RELEASE_DATE))); for (MediaArtwork ma : metadata.getFanart()) { if (ma.getType() == MediaArtworkType.THUMB) { 
lblThumb.setImageUrl(ma.getDefaultUrl()); break; } } } } catch (Exception e) { LOGGER.warn("Error getting metadata " + e.getMessage()); } return null; } private Date parseFirstAired(String aired) { try { Pattern date = Pattern.compile("([0-9]{2})[_\\.-]([0-9]{2})[_\\.-]([0-9]{4})"); Matcher m = date.matcher(aired); if (m.find()) { return new SimpleDateFormat("dd-MM-yyyy").parse(m.group(1) + "-" + m.group(2) + "-" + m.group(3)); } else { date = Pattern.compile("([0-9]{4})[_\\.-]([0-9]{2})[_\\.-]([0-9]{2})"); m = date.matcher(aired); if (m.find()) { return new SimpleDateFormat("yyyy-MM-dd").parse(m.group(1) + "-" + m.group(2) + "-" + m.group(3)); } else { return INITIAL_DATE; } } } catch (Exception e) { return INITIAL_DATE; } } } protected void initDataBindings() { jTableBinding = SwingBindings.createJTableBinding(UpdateStrategy.READ, cast, tableGuests); // BeanProperty<TvShowActor, String> movieCastBeanProperty = BeanProperty.create("name"); jTableBinding.addColumnBinding(movieCastBeanProperty); // BeanProperty<TvShowActor, String> movieCastBeanProperty_1 = BeanProperty.create("character"); jTableBinding.addColumnBinding(movieCastBeanProperty_1); // jTableBinding.bind(); // jListBinding = SwingBindings.createJListBinding(UpdateStrategy.READ, tags, listTags); jListBinding.bind(); // } @Override public void dispose() { super.dispose(); jTableBinding.unbind(); jListBinding.unbind(); } @Override public void pack() { // do not let it pack - it looks weird } private class AddTagAction extends AbstractAction { private static final long serialVersionUID = 5968029647764173330L; public AddTagAction() { putValue(SHORT_DESCRIPTION, BUNDLE.getString("tag.add")); //$NON-NLS-1$ } @Override public void actionPerformed(ActionEvent e) { String newTag = (String) cbTags.getSelectedItem(); if (StringUtils.isBlank(newTag)) { return; } boolean tagFound = false; // search if this tag already has been added for (String tag : tags) { if (tag.equals(newTag)) { tagFound = true; break; } } // 
add tag if (!tagFound) { tags.add(newTag); } } } private class RemoveTagAction extends AbstractAction { private static final long serialVersionUID = -4799506776650330500L; public RemoveTagAction() { putValue(SHORT_DESCRIPTION, BUNDLE.getString("tag.remove")); //$NON-NLS-1$ } @Override public void actionPerformed(ActionEvent e) { String tag = (String) listTags.getSelectedValue(); tags.remove(tag); } } private class AddActorAction extends AbstractAction { private static final long serialVersionUID = -5879601617842300526L; public AddActorAction() { putValue(SHORT_DESCRIPTION, BUNDLE.getString("cast.actor.add")); //$NON-NLS-1$ } @Override public void actionPerformed(ActionEvent e) { TvShowActor actor = new TvShowActor(BUNDLE.getString("cast.actor.unknown"), BUNDLE.getString("cast.role.unknown")); //$NON-NLS-1$ cast.add(0, actor); } } private class RemoveActorAction extends AbstractAction { private static final long serialVersionUID = 6970920169867315771L; public RemoveActorAction() { putValue(SHORT_DESCRIPTION, BUNDLE.getString("cast.actor.remove")); //$NON-NLS-1$ } @Override public void actionPerformed(ActionEvent e) { int row = tableGuests.getSelectedRow(); if (row > -1) { row = tableGuests.convertRowIndexToModel(row); cast.remove(row); } } } }
apache-2.0
vicegd/vilang
xtext/org.vilang/src-gen/org/vilang/vilang/impl/ModelTypeImpl.java
3896
/** */
package org.vilang.vilang.impl;

import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.impl.MinimalEObjectImpl;
import org.vilang.vilang.ModelType;
import org.vilang.vilang.VilangPackage;

/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>Model Type</b></em>'.
 *
 * NOTE: this class is EMF-generated (see the {@code @generated} tags); edit the
 * Ecore model / grammar and regenerate rather than hand-modifying the code.
 * It stores the single boolean '<em>Instance</em>' feature and dispatches the
 * reflective EMF accessors ({@code eGet}/{@code eSet}/{@code eUnset}/{@code eIsSet})
 * to it by feature ID.
 * <!-- end-user-doc -->
 * <p>
 * The following features are implemented:
 * </p>
 * <ul>
 *   <li>{@link org.vilang.vilang.impl.ModelTypeImpl#isInstance <em>Instance</em>}</li>
 * </ul>
 *
 * @generated
 */
public class ModelTypeImpl extends MinimalEObjectImpl.Container implements ModelType {
  /**
   * The default value of the '{@link #isInstance() <em>Instance</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #isInstance()
   * @generated
   * @ordered
   */
  protected static final boolean INSTANCE_EDEFAULT = false;

  /**
   * The cached value of the '{@link #isInstance() <em>Instance</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #isInstance()
   * @generated
   * @ordered
   */
  protected boolean instance = INSTANCE_EDEFAULT;

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  protected ModelTypeImpl() {
    super();
  }

  /**
   * Returns the static EMF metaclass for this model object.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  protected EClass eStaticClass() {
    return VilangPackage.Literals.MODEL_TYPE;
  }

  /**
   * Returns the current value of the '<em>Instance</em>' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public boolean isInstance() {
    return instance;
  }

  /**
   * Sets the '<em>Instance</em>' attribute and, when adapters are attached,
   * fires a SET notification carrying the old and new values.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public void setInstance(boolean newInstance) {
    boolean oldInstance = instance;
    instance = newInstance;
    if (eNotificationRequired())
      eNotify(new ENotificationImpl(this, Notification.SET, VilangPackage.MODEL_TYPE__INSTANCE, oldInstance, instance));
  }

  /**
   * Reflective getter: routes the INSTANCE feature ID to {@link #isInstance()}.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public Object eGet(int featureID, boolean resolve, boolean coreType) {
    switch (featureID) {
      case VilangPackage.MODEL_TYPE__INSTANCE:
        return isInstance();
    }
    return super.eGet(featureID, resolve, coreType);
  }

  /**
   * Reflective setter: unboxes the value and delegates to {@link #setInstance(boolean)}.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public void eSet(int featureID, Object newValue) {
    switch (featureID) {
      case VilangPackage.MODEL_TYPE__INSTANCE:
        setInstance((Boolean)newValue);
        return;
    }
    super.eSet(featureID, newValue);
  }

  /**
   * Reflective unset: restores the feature to its default value.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public void eUnset(int featureID) {
    switch (featureID) {
      case VilangPackage.MODEL_TYPE__INSTANCE:
        setInstance(INSTANCE_EDEFAULT);
        return;
    }
    super.eUnset(featureID);
  }

  /**
   * Reflective is-set check: true when the feature differs from its default.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public boolean eIsSet(int featureID) {
    switch (featureID) {
      case VilangPackage.MODEL_TYPE__INSTANCE:
        return instance != INSTANCE_EDEFAULT;
    }
    return super.eIsSet(featureID);
  }

  /**
   * Appends the feature values to the superclass string form; proxies keep
   * the plain superclass representation.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public String toString() {
    if (eIsProxy()) return super.toString();

    StringBuffer result = new StringBuffer(super.toString());
    result.append(" (instance: ");
    result.append(instance);
    result.append(')');
    return result.toString();
  }

} //ModelTypeImpl
apache-2.0
ServicioReparaciones/ServicioReparaciones
ServicioReparaciones-web/src/main/java/com/servicio/reparaciones/web/bean/CredencialBean.java
4343
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package com.servicio.reparaciones.web.bean;

import com.servicio.reparaciones.modelo.nosql.Usuario;
import com.servicio.reparaciones.servicio.UsuarioServicio;
import com.servicio.reparaciones.web.auth.AuthorizationListener;
import com.servicio.reparaciones.web.auth.GoogleAnalyticsTracking;
import com.servicio.reparaciones.web.util.FacesUtil;
import java.io.IOException;
import java.io.Serializable;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.enterprise.context.SessionScoped;
import javax.faces.context.FacesContext;
import javax.faces.event.ActionEvent;
import javax.inject.Named;
import javax.inject.Inject;
import javax.servlet.http.HttpSession;
import org.primefaces.context.RequestContext;

/**
 * Session-scoped JSF backing bean holding the authenticated {@link Usuario}
 * for the current HTTP session. Exposes session start, profile update,
 * password change and logout actions to the views.
 *
 * @author luis
 */
@Named(value = "credencialBean")
@SessionScoped
public class CredencialBean implements Serializable {

    // Fixed: the logger was previously created with AuthorizationListener's
    // class name (copy/paste), which mis-attributed this bean's log records.
    private static final Logger LOG = Logger.getLogger(CredencialBean.class.getName());
    private static final long serialVersionUID = -3799042563216409371L;

    // The user bound to this session; replaced on login, reset on logout.
    private Usuario userSession = new Usuario();
    // Second password entry used to confirm a password change.
    private String confirmationPassword;
    // Snapshot of the user's password taken at session start; re-applied in modifyInfo.
    private String password;
    // Analytics client; created once per session. NOTE(review): currently
    // never used after construction — confirm whether tracking calls are missing.
    private GoogleAnalyticsTracking tracking;

    @Inject
    private UsuarioServicio usuarioService;

    /**
     * Initializes the analytics tracker; failures are logged and the bean
     * stays usable (tracking is best-effort).
     */
    public CredencialBean() {
        try {
            this.tracking = new GoogleAnalyticsTracking("UA-102903071-1");
        } catch (IOException ex) {
            // Pass the Throwable itself so the stack trace is kept. The old
            // code passed ex.getMessage() as a JUL message *parameter*; with
            // no {0} placeholder in the message it was silently discarded.
            LOG.log(Level.SEVERE, "GoogleAnalyticsTracking", ex);
        }
    }

    /** Resets the session user to a blank {@link Usuario}. */
    private void init() {
        this.userSession = new Usuario();
    }

    /**
     * Binds the given user to this session and publishes its identifying
     * attributes into the JSF session map.
     *
     * @param usuario the freshly authenticated user
     */
    public void startSession(Usuario usuario) {
        this.userSession = usuario;
        this.password = this.userSession.getPassword();
        this.loadDataSession(this.userSession);
    }

    /** Copies the user's id, name and admin flag into the session map. */
    private void loadDataSession(Usuario usuario) {
        FacesContext.getCurrentInstance().getExternalContext().getSessionMap().
                put("codigo", usuario.getCodigo());
        FacesContext.getCurrentInstance().getExternalContext().getSessionMap().
                put("username", usuario.getUsername());
        FacesContext.getCurrentInstance().getExternalContext().getSessionMap().
                put("admin", usuario.getAdmin());
    }

    /**
     * Persists the edited profile, restoring the password captured at session
     * start so a profile edit cannot clobber it. Reports success/failure via
     * a faces message.
     */
    public void modifyInfo(ActionEvent evt) {
        this.userSession.setPassword(this.password);
        Boolean exito = this.usuarioService.update(this.userSession);
        if (exito) {
            FacesUtil.addMessageInfo("Se actualizo exitosamente.");
        } else {
            FacesUtil.addMessageError(null, "No actualizo.");
        }
    }

    /**
     * Updates the user's password when the confirmation entry matches;
     * otherwise reports a mismatch.
     */
    public void changePassword(ActionEvent evt) {
        if (this.userSession.getPassword().equals(this.confirmationPassword)) {
            Boolean exito = this.usuarioService.updatePassword(this.userSession);
            if (exito) {
                FacesUtil.addMessageInfo("Se modifico el password con exito.");
            } else {
                FacesUtil.addMessageError(null, "No se modifico el password con exito.");
            }
        } else {
            FacesUtil.addMessageInfo("El password no coincide.");
        }
    }

    public Usuario getUserSession() {
        return userSession;
    }

    public void setUserSession(Usuario userSession) {
        this.userSession = userSession;
    }

    public String getConfirmationPassword() {
        return confirmationPassword;
    }

    public void setConfirmationPassword(String confirmationPassword) {
        this.confirmationPassword = confirmationPassword;
    }

    /**
     * Invalidates the HTTP session, resets the bean state and hands the
     * index-page URL back to the PrimeFaces callback.
     */
    public void logout(ActionEvent event) {
        String url = FacesContext.getCurrentInstance().getExternalContext().getRequestContextPath()
                + "/faces/index.xhtml";
        RequestContext context = RequestContext.getCurrentInstance();
        FacesContext facesContext = FacesContext.getCurrentInstance();
        HttpSession session = (HttpSession) facesContext.getExternalContext().getSession(false);
        // getSession(false) returns null when no session exists (e.g. a
        // double logout); guard to avoid a NullPointerException.
        if (session != null) {
            session.removeAttribute(this.userSession.getUsername());
            session.invalidate();
        }
        this.init();
        context.addCallbackParam("loggerOut", true);
        context.addCallbackParam("ruta", url);
    }
}
apache-2.0
eemirtekin/Sakai-10.6-TR
entitybroker/api/src/java/org/sakaiproject/entitybroker/providers/EntityRequestHandler.java
3842
/** * $Id: EntityRequestHandler.java 105077 2012-02-24 22:54:29Z ottenhoff@longsight.com $ * $URL: https://source.sakaiproject.org/svn/entitybroker/tags/sakai-10.6/api/src/java/org/sakaiproject/entitybroker/providers/EntityRequestHandler.java $ * EntityRequestHandler.java - entity-broker - Apr 6, 2008 9:03:03 AM - azeckoski ************************************************************************** * Copyright (c) 2008, 2009 The Sakai Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ECL-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.sakaiproject.entitybroker.providers; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.sakaiproject.entitybroker.EntityReference; import org.sakaiproject.entitybroker.exception.EntityException; /** * Handles the URL/request processing for an entity in a central location * * @author Aaron Zeckoski (aaron@caret.cam.ac.uk) */ public interface EntityRequestHandler { /** * The reserved word used to trigger entity descriptions */ public static String DESCRIBE = "describe"; public static String SLASH_DESCRIBE = EntityReference.SEPARATOR + DESCRIBE; /** * The reserved word used to trigger batch operations */ public static String BATCH = "batch"; public static String SLASH_BATCH = EntityReference.SEPARATOR + BATCH; /** * This is the name of the header which will contain the id of newly created entities */ public static String HEADER_ENTITY_ID = "EntityId"; /** * This is the name of the header which will contain the reference of created/updated entities */ public static String HEADER_ENTITY_REFERENCE = "EntityReference"; /** * This is the name of the header that will contain created/updated entities SHOW URL */ public static String HEADER_ENTITY_URL = "Location"; /** * The id used in generated URLs */ public static String FAKE_ID = ":ID:"; /** * This is the special indicator used to denote that POST should be translated to a PUT or DELETE * in order to compensate for browser limitations, * Example: /people/1?_method=PUT */ public static String COMPENSATE_METHOD = "_method"; /** * Handles the servlet request response cycle for all direct servlet accesses, * logically, we only want to let this request continue on if the entity exists AND * there is an http access provider to handle it AND the user can access it * (there is some auth completed already or no auth is required) * * @param req the servlet request * @param res the servlet response * @param path the path from the request (if null it will be 
generated from the req) * @return the entity reference that was handled as part of this request * @throws EntityException if entity could not be found or failure parsing */ public String handleEntityAccess(HttpServletRequest req, HttpServletResponse res, String path); /** * Handles an error which occurs by sending an email and logging extra info about the failure * @param req the current request * @param error the current error that occurred * @return the comprehensive error message */ public String handleEntityError(HttpServletRequest req, Throwable error); }
apache-2.0
slipperyseal/B9
src/test/java/net/catchpole/B9/codec/bean/PersonBean.java
1757
package net.catchpole.B9.codec.bean;

import java.util.Objects;

/**
 * Mutable bean describing a person, used as a round-trip fixture by the codec
 * tests. {@code equals}/{@code hashCode} consider all four properties with
 * null-safe comparisons; {@code toString} is a stable, human-readable dump.
 */
public class PersonBean {
    private String name;
    private Integer age;
    private Integer cats;
    private boolean alive;

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public Integer getAge() {
        return age;
    }

    public void setAge(Integer age) {
        this.age = age;
    }

    public boolean isAlive() {
        return alive;
    }

    public void setAlive(boolean alive) {
        this.alive = alive;
    }

    public Integer getCats() {
        return cats;
    }

    public void setCats(Integer cats) {
        this.cats = cats;
    }

    /**
     * Two beans are equal iff they are the same class and all four properties
     * match; nullable fields are compared null-safely via
     * {@link Objects#equals(Object, Object)} (replaces the hand-rolled
     * ternary chains, same semantics).
     */
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        PersonBean that = (PersonBean) o;
        return alive == that.alive
                && Objects.equals(name, that.name)
                && Objects.equals(age, that.age)
                && Objects.equals(cats, that.cats);
    }

    /** Consistent with {@link #equals(Object)}: built from the same four properties. */
    @Override
    public int hashCode() {
        return Objects.hash(name, age, cats, alive);
    }

    @Override
    public String toString() {
        return "PersonBean{" +
                "name='" + name + '\'' +
                ", age=" + age +
                ", cats=" + cats +
                ", alive=" + alive +
                '}';
    }
}
apache-2.0
bushuyev/niomongo
src/main/java/com/niomongo/processing/common/JWTCheckCondition.java
2230
package com.niomongo.processing.common;

import com.niomongo.RequestProcessor;
import com.niomongo.conversion.json.BytesRange;
import com.niomongo.conversion.json.JsonObject;
import com.niomongo.conversion.json.Value;
import com.niomongo.processing.Condition;
import com.niomongo.processing.JWTAction;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.nio.ByteBuffer;
import java.util.List;

/**
 * Template-method {@link Condition} that compares the JWT subject-id claim of
 * the current request against value(s) extracted from the request by a
 * subclass. {@link #check} fetches the subject id once; subclasses implement
 * {@link #doCheck} (directly or via the {@link Many}/{@link One} skeletons).
 *
 * Created with IntelliJ IDEA.
 * User: Yevgen Bushuyev
 * Date: 27.10.15.
 */
public abstract class JWTCheckCondition implements Condition {

    private final static Logger logger = LoggerFactory.getLogger(JWTCheckCondition.class);

    /**
     * Extracts the JWT subject id and delegates the comparison to the subclass.
     *
     * NOTE(review): throws NullPointerException if the SUBJECT_ID claim is
     * absent — confirm callers guarantee its presence.
     * NOTE(review): getBytes() uses the platform default charset; presumably
     * UTF-8 is intended to match Value.compare — confirm.
     */
    public boolean check(RequestProcessor requestProcessor) {
        String claimValue = (String) requestProcessor.getJwtClaims().getClaimValue(JWTAction.SUBJECT_ID);
        byte[] claimBytes = claimValue.getBytes();
        if (logger.isDebugEnabled()) {
            logger.debug("claim_id: {}", claimValue);
        }
        return doCheck(requestProcessor, claimBytes);
    }

    /**
     * Subclass hook: compare the request's own value(s) against the raw bytes
     * of the JWT subject-id claim.
     */
    protected abstract boolean doCheck(RequestProcessor requestProcessor, byte[] claimBytes);

    /**
     * Skeleton for conditions that inspect a list of objects from the request.
     */
    public static abstract class Many<T extends JsonObject> extends JWTCheckCondition {

        /**
         * Fails (returns false) on an empty list.
         *
         * NOTE(review): also returns false as soon as ANY list entry's value
         * MATCHES the JWT subject, and true only when NO entry matches — the
         * inverse polarity of {@link One#doCheck}. This looks inverted; left
         * as-is because the intended semantics cannot be confirmed from this
         * file — verify against the callers of Many.
         */
        @Override
        public boolean doCheck(RequestProcessor requestProcessor, byte[] claimBytes) {
            List<T> providedClaim = getClaimsList(requestProcessor);
            if (providedClaim.size() == 0) {
                return false;
            }
            for (T claim : providedClaim) {
                if (getClaimValue(claim).compare(claimBytes)) {
                    return false;
                }
            }
            return true;
        }

        /** Extracts the comparable value from one list entry. */
        protected abstract Value getClaimValue(T claim);

        /** Extracts the list of candidate objects from the request. */
        protected abstract List<T> getClaimsList(RequestProcessor requestProcessor);
    }

    /**
     * Skeleton for conditions that inspect a single object from the request:
     * passes only when the extracted value is non-null and equals the JWT
     * subject-id bytes.
     */
    public static abstract class One<T extends JsonObject> extends JWTCheckCondition {

        @Override
        protected boolean doCheck(RequestProcessor requestProcessor, byte[] claimBytes) {
            T providedClaim = getClaimObject(requestProcessor);
            Value claimValue = getClaimValue((T) providedClaim);
            return claimValue != null && claimValue.compare(claimBytes);
        }

        /** Extracts the comparable value from the object. */
        protected abstract Value getClaimValue(T claim);

        /** Extracts the single candidate object from the request. */
        protected abstract T getClaimObject(RequestProcessor requestProcessor);
    }
}
apache-2.0
uh-cs-iotlab/kahvihub
core/src/main/java/fi/helsinki/cs/iot/hub/model/feed/FeedType.java
846
/* * fi.helsinki.cs.iot.hub.model.feed.FeedType * v0.1 * 2015 * * Copyright 2015 University of Helsinki * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at: * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied. * See the License for the specific language governing permissions * and limitations under the License. */ package fi.helsinki.cs.iot.hub.model.feed; /** * * @author Julien Mineraud <julien.mineraud@cs.helsinki.fi> * */ public enum FeedType { ATOMIC, COMPOSED, EXECUTABLE }
apache-2.0
eug48/hapi-fhir
hapi-fhir-jaxrsserver-example/src/main/java/ca/uhn/fhir/jaxrs/server/example/JaxRsConformanceProvider.java
1505
package ca.uhn.fhir.jaxrs.server.example;

import java.util.concurrent.ConcurrentHashMap;

import javax.ejb.Stateless;
import javax.inject.Inject;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;

import ca.uhn.fhir.jaxrs.server.AbstractJaxRsConformanceProvider;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.server.IResourceProvider;

/**
 * Conformance Rest Service: exposes the server's capability statement for the
 * example JAX-RS FHIR server, covering this provider and the injected patient
 * provider.
 *
 * @author Peter Van Houte | peter.vanhoute@agfa.com | Agfa Healthcare
 */
@Path("")
@Stateless
@Produces({ MediaType.APPLICATION_JSON, Constants.CT_FHIR_JSON, Constants.CT_FHIR_XML })
public class JaxRsConformanceProvider extends AbstractJaxRsConformanceProvider {

  private static final String SERVER_VERSION = "1.0.0";
  private static final String SERVER_DESCRIPTION = "Jax-Rs Test Example Description";
  private static final String SERVER_NAME = "Jax-Rs Test Example";

  // Injected by the container; included in the conformance statement below.
  @Inject
  private JaxRsPatientRestProvider patientProvider;

  /**
   * Standard Constructor
   */
  public JaxRsConformanceProvider() {
    super(SERVER_VERSION, SERVER_DESCRIPTION, SERVER_NAME);
  }

  /**
   * Returns the providers described by the conformance statement: this
   * provider itself plus the patient provider.
   */
  @Override
  protected ConcurrentHashMap<Class<? extends IResourceProvider>, IResourceProvider> getProviders() {
    // Diamond operator replaces the repeated type arguments (idiom cleanup).
    ConcurrentHashMap<Class<? extends IResourceProvider>, IResourceProvider> map = new ConcurrentHashMap<>();
    map.put(JaxRsConformanceProvider.class, this);
    map.put(JaxRsPatientRestProvider.class, patientProvider);
    return map;
  }
}
apache-2.0
vam-google/google-cloud-java
google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ClusterStatus.java
43260
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/dataproc/v1beta2/clusters.proto package com.google.cloud.dataproc.v1beta2; /** * * * <pre> * The status of a cluster and its instances. * </pre> * * Protobuf type {@code google.cloud.dataproc.v1beta2.ClusterStatus} */ public final class ClusterStatus extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.dataproc.v1beta2.ClusterStatus) ClusterStatusOrBuilder { private static final long serialVersionUID = 0L; // Use ClusterStatus.newBuilder() to construct. private ClusterStatus(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ClusterStatus() { state_ = 0; detail_ = ""; substate_ = 0; } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ClusterStatus( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { int rawValue = input.readEnum(); state_ = rawValue; break; } case 18: { java.lang.String s = input.readStringRequireUtf8(); detail_ = s; break; } case 26: { com.google.protobuf.Timestamp.Builder subBuilder = null; if (stateStartTime_ != null) { subBuilder = stateStartTime_.toBuilder(); } stateStartTime_ = input.readMessage(com.google.protobuf.Timestamp.parser(), extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(stateStartTime_); stateStartTime_ = subBuilder.buildPartial(); } break; } case 32: { int rawValue = input.readEnum(); substate_ = 
rawValue; break; } default: { if (!parseUnknownFieldProto3(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dataproc.v1beta2.ClustersProto .internal_static_google_cloud_dataproc_v1beta2_ClusterStatus_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dataproc.v1beta2.ClustersProto .internal_static_google_cloud_dataproc_v1beta2_ClusterStatus_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dataproc.v1beta2.ClusterStatus.class, com.google.cloud.dataproc.v1beta2.ClusterStatus.Builder.class); } /** * * * <pre> * The cluster state. * </pre> * * Protobuf enum {@code google.cloud.dataproc.v1beta2.ClusterStatus.State} */ public enum State implements com.google.protobuf.ProtocolMessageEnum { /** * * * <pre> * The cluster state is unknown. * </pre> * * <code>UNKNOWN = 0;</code> */ UNKNOWN(0), /** * * * <pre> * The cluster is being created and set up. It is not ready for use. * </pre> * * <code>CREATING = 1;</code> */ CREATING(1), /** * * * <pre> * The cluster is currently running and healthy. It is ready for use. * </pre> * * <code>RUNNING = 2;</code> */ RUNNING(2), /** * * * <pre> * The cluster encountered an error. It is not ready for use. * </pre> * * <code>ERROR = 3;</code> */ ERROR(3), /** * * * <pre> * The cluster is being deleted. It cannot be used. * </pre> * * <code>DELETING = 4;</code> */ DELETING(4), /** * * * <pre> * The cluster is being updated. It continues to accept and process jobs. 
* </pre> * * <code>UPDATING = 5;</code> */ UPDATING(5), UNRECOGNIZED(-1), ; /** * * * <pre> * The cluster state is unknown. * </pre> * * <code>UNKNOWN = 0;</code> */ public static final int UNKNOWN_VALUE = 0; /** * * * <pre> * The cluster is being created and set up. It is not ready for use. * </pre> * * <code>CREATING = 1;</code> */ public static final int CREATING_VALUE = 1; /** * * * <pre> * The cluster is currently running and healthy. It is ready for use. * </pre> * * <code>RUNNING = 2;</code> */ public static final int RUNNING_VALUE = 2; /** * * * <pre> * The cluster encountered an error. It is not ready for use. * </pre> * * <code>ERROR = 3;</code> */ public static final int ERROR_VALUE = 3; /** * * * <pre> * The cluster is being deleted. It cannot be used. * </pre> * * <code>DELETING = 4;</code> */ public static final int DELETING_VALUE = 4; /** * * * <pre> * The cluster is being updated. It continues to accept and process jobs. * </pre> * * <code>UPDATING = 5;</code> */ public static final int UPDATING_VALUE = 5; public final int getNumber() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalArgumentException( "Can't get the number of an unknown enum value."); } return value; } /** @deprecated Use {@link #forNumber(int)} instead. 
*/ @java.lang.Deprecated public static State valueOf(int value) { return forNumber(value); } public static State forNumber(int value) { switch (value) { case 0: return UNKNOWN; case 1: return CREATING; case 2: return RUNNING; case 3: return ERROR; case 4: return DELETING; case 5: return UPDATING; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<State> internalGetValueMap() { return internalValueMap; } private static final com.google.protobuf.Internal.EnumLiteMap<State> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<State>() { public State findValueByNumber(int number) { return State.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return com.google.cloud.dataproc.v1beta2.ClusterStatus.getDescriptor().getEnumTypes().get(0); } private static final State[] VALUES = values(); public static State valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type."); } if (desc.getIndex() == -1) { return UNRECOGNIZED; } return VALUES[desc.getIndex()]; } private final int value; private State(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:google.cloud.dataproc.v1beta2.ClusterStatus.State) } /** * * * <pre> * The cluster substate. * </pre> * * Protobuf enum {@code google.cloud.dataproc.v1beta2.ClusterStatus.Substate} */ public enum Substate implements com.google.protobuf.ProtocolMessageEnum { /** * * * <pre> * The cluster substate is unknown. 
* </pre> * * <code>UNSPECIFIED = 0;</code> */ UNSPECIFIED(0), /** * * * <pre> * The cluster is known to be in an unhealthy state * (for example, critical daemons are not running or HDFS capacity is * exhausted). * Applies to RUNNING state. * </pre> * * <code>UNHEALTHY = 1;</code> */ UNHEALTHY(1), /** * * * <pre> * The agent-reported status is out of date (may occur if * Cloud Dataproc loses communication with Agent). * Applies to RUNNING state. * </pre> * * <code>STALE_STATUS = 2;</code> */ STALE_STATUS(2), UNRECOGNIZED(-1), ; /** * * * <pre> * The cluster substate is unknown. * </pre> * * <code>UNSPECIFIED = 0;</code> */ public static final int UNSPECIFIED_VALUE = 0; /** * * * <pre> * The cluster is known to be in an unhealthy state * (for example, critical daemons are not running or HDFS capacity is * exhausted). * Applies to RUNNING state. * </pre> * * <code>UNHEALTHY = 1;</code> */ public static final int UNHEALTHY_VALUE = 1; /** * * * <pre> * The agent-reported status is out of date (may occur if * Cloud Dataproc loses communication with Agent). * Applies to RUNNING state. * </pre> * * <code>STALE_STATUS = 2;</code> */ public static final int STALE_STATUS_VALUE = 2; public final int getNumber() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalArgumentException( "Can't get the number of an unknown enum value."); } return value; } /** @deprecated Use {@link #forNumber(int)} instead. 
*/ @java.lang.Deprecated public static Substate valueOf(int value) { return forNumber(value); } public static Substate forNumber(int value) { switch (value) { case 0: return UNSPECIFIED; case 1: return UNHEALTHY; case 2: return STALE_STATUS; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<Substate> internalGetValueMap() { return internalValueMap; } private static final com.google.protobuf.Internal.EnumLiteMap<Substate> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<Substate>() { public Substate findValueByNumber(int number) { return Substate.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return com.google.cloud.dataproc.v1beta2.ClusterStatus.getDescriptor().getEnumTypes().get(1); } private static final Substate[] VALUES = values(); public static Substate valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type."); } if (desc.getIndex() == -1) { return UNRECOGNIZED; } return VALUES[desc.getIndex()]; } private final int value; private Substate(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:google.cloud.dataproc.v1beta2.ClusterStatus.Substate) } public static final int STATE_FIELD_NUMBER = 1; private int state_; /** * * * <pre> * Output only. The cluster's state. * </pre> * * <code>.google.cloud.dataproc.v1beta2.ClusterStatus.State state = 1;</code> */ public int getStateValue() { return state_; } /** * * * <pre> * Output only. The cluster's state. 
* </pre> * * <code>.google.cloud.dataproc.v1beta2.ClusterStatus.State state = 1;</code> */ public com.google.cloud.dataproc.v1beta2.ClusterStatus.State getState() { @SuppressWarnings("deprecation") com.google.cloud.dataproc.v1beta2.ClusterStatus.State result = com.google.cloud.dataproc.v1beta2.ClusterStatus.State.valueOf(state_); return result == null ? com.google.cloud.dataproc.v1beta2.ClusterStatus.State.UNRECOGNIZED : result; } public static final int DETAIL_FIELD_NUMBER = 2; private volatile java.lang.Object detail_; /** * * * <pre> * Output only. Optional details of cluster's state. * </pre> * * <code>string detail = 2;</code> */ public java.lang.String getDetail() { java.lang.Object ref = detail_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); detail_ = s; return s; } } /** * * * <pre> * Output only. Optional details of cluster's state. * </pre> * * <code>string detail = 2;</code> */ public com.google.protobuf.ByteString getDetailBytes() { java.lang.Object ref = detail_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); detail_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int STATE_START_TIME_FIELD_NUMBER = 3; private com.google.protobuf.Timestamp stateStartTime_; /** * * * <pre> * Output only. Time when this state was entered. * </pre> * * <code>.google.protobuf.Timestamp state_start_time = 3;</code> */ public boolean hasStateStartTime() { return stateStartTime_ != null; } /** * * * <pre> * Output only. Time when this state was entered. * </pre> * * <code>.google.protobuf.Timestamp state_start_time = 3;</code> */ public com.google.protobuf.Timestamp getStateStartTime() { return stateStartTime_ == null ? 
com.google.protobuf.Timestamp.getDefaultInstance() : stateStartTime_; } /** * * * <pre> * Output only. Time when this state was entered. * </pre> * * <code>.google.protobuf.Timestamp state_start_time = 3;</code> */ public com.google.protobuf.TimestampOrBuilder getStateStartTimeOrBuilder() { return getStateStartTime(); } public static final int SUBSTATE_FIELD_NUMBER = 4; private int substate_; /** * * * <pre> * Output only. Additional state information that includes * status reported by the agent. * </pre> * * <code>.google.cloud.dataproc.v1beta2.ClusterStatus.Substate substate = 4;</code> */ public int getSubstateValue() { return substate_; } /** * * * <pre> * Output only. Additional state information that includes * status reported by the agent. * </pre> * * <code>.google.cloud.dataproc.v1beta2.ClusterStatus.Substate substate = 4;</code> */ public com.google.cloud.dataproc.v1beta2.ClusterStatus.Substate getSubstate() { @SuppressWarnings("deprecation") com.google.cloud.dataproc.v1beta2.ClusterStatus.Substate result = com.google.cloud.dataproc.v1beta2.ClusterStatus.Substate.valueOf(substate_); return result == null ? 
com.google.cloud.dataproc.v1beta2.ClusterStatus.Substate.UNRECOGNIZED : result; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (state_ != com.google.cloud.dataproc.v1beta2.ClusterStatus.State.UNKNOWN.getNumber()) { output.writeEnum(1, state_); } if (!getDetailBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, detail_); } if (stateStartTime_ != null) { output.writeMessage(3, getStateStartTime()); } if (substate_ != com.google.cloud.dataproc.v1beta2.ClusterStatus.Substate.UNSPECIFIED.getNumber()) { output.writeEnum(4, substate_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (state_ != com.google.cloud.dataproc.v1beta2.ClusterStatus.State.UNKNOWN.getNumber()) { size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, state_); } if (!getDetailBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, detail_); } if (stateStartTime_ != null) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getStateStartTime()); } if (substate_ != com.google.cloud.dataproc.v1beta2.ClusterStatus.Substate.UNSPECIFIED.getNumber()) { size += com.google.protobuf.CodedOutputStream.computeEnumSize(4, substate_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.dataproc.v1beta2.ClusterStatus)) { return super.equals(obj); } com.google.cloud.dataproc.v1beta2.ClusterStatus other = 
(com.google.cloud.dataproc.v1beta2.ClusterStatus) obj; boolean result = true; result = result && state_ == other.state_; result = result && getDetail().equals(other.getDetail()); result = result && (hasStateStartTime() == other.hasStateStartTime()); if (hasStateStartTime()) { result = result && getStateStartTime().equals(other.getStateStartTime()); } result = result && substate_ == other.substate_; result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + STATE_FIELD_NUMBER; hash = (53 * hash) + state_; hash = (37 * hash) + DETAIL_FIELD_NUMBER; hash = (53 * hash) + getDetail().hashCode(); if (hasStateStartTime()) { hash = (37 * hash) + STATE_START_TIME_FIELD_NUMBER; hash = (53 * hash) + getStateStartTime().hashCode(); } hash = (37 * hash) + SUBSTATE_FIELD_NUMBER; hash = (53 * hash) + substate_; hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.dataproc.v1beta2.ClusterStatus parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dataproc.v1beta2.ClusterStatus parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dataproc.v1beta2.ClusterStatus parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dataproc.v1beta2.ClusterStatus parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { 
return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dataproc.v1beta2.ClusterStatus parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dataproc.v1beta2.ClusterStatus parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dataproc.v1beta2.ClusterStatus parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dataproc.v1beta2.ClusterStatus parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dataproc.v1beta2.ClusterStatus parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.dataproc.v1beta2.ClusterStatus parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dataproc.v1beta2.ClusterStatus parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dataproc.v1beta2.ClusterStatus parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.dataproc.v1beta2.ClusterStatus prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The status of a cluster and its instances. * </pre> * * Protobuf type {@code google.cloud.dataproc.v1beta2.ClusterStatus} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.dataproc.v1beta2.ClusterStatus) com.google.cloud.dataproc.v1beta2.ClusterStatusOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dataproc.v1beta2.ClustersProto .internal_static_google_cloud_dataproc_v1beta2_ClusterStatus_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dataproc.v1beta2.ClustersProto .internal_static_google_cloud_dataproc_v1beta2_ClusterStatus_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dataproc.v1beta2.ClusterStatus.class, com.google.cloud.dataproc.v1beta2.ClusterStatus.Builder.class); } // Construct using com.google.cloud.dataproc.v1beta2.ClusterStatus.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); 
maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} } @java.lang.Override public Builder clear() { super.clear(); state_ = 0; detail_ = ""; if (stateStartTimeBuilder_ == null) { stateStartTime_ = null; } else { stateStartTime_ = null; stateStartTimeBuilder_ = null; } substate_ = 0; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.dataproc.v1beta2.ClustersProto .internal_static_google_cloud_dataproc_v1beta2_ClusterStatus_descriptor; } @java.lang.Override public com.google.cloud.dataproc.v1beta2.ClusterStatus getDefaultInstanceForType() { return com.google.cloud.dataproc.v1beta2.ClusterStatus.getDefaultInstance(); } @java.lang.Override public com.google.cloud.dataproc.v1beta2.ClusterStatus build() { com.google.cloud.dataproc.v1beta2.ClusterStatus result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.dataproc.v1beta2.ClusterStatus buildPartial() { com.google.cloud.dataproc.v1beta2.ClusterStatus result = new com.google.cloud.dataproc.v1beta2.ClusterStatus(this); result.state_ = state_; result.detail_ = detail_; if (stateStartTimeBuilder_ == null) { result.stateStartTime_ = stateStartTime_; } else { result.stateStartTime_ = stateStartTimeBuilder_.build(); } result.substate_ = substate_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return (Builder) super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return (Builder) super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } @java.lang.Override public Builder 
clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return (Builder) super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return (Builder) super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.dataproc.v1beta2.ClusterStatus) { return mergeFrom((com.google.cloud.dataproc.v1beta2.ClusterStatus) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.dataproc.v1beta2.ClusterStatus other) { if (other == com.google.cloud.dataproc.v1beta2.ClusterStatus.getDefaultInstance()) return this; if (other.state_ != 0) { setStateValue(other.getStateValue()); } if (!other.getDetail().isEmpty()) { detail_ = other.detail_; onChanged(); } if (other.hasStateStartTime()) { mergeStateStartTime(other.getStateStartTime()); } if (other.substate_ != 0) { setSubstateValue(other.getSubstateValue()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.cloud.dataproc.v1beta2.ClusterStatus parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.cloud.dataproc.v1beta2.ClusterStatus) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } 
return this; } private int state_ = 0; /** * * * <pre> * Output only. The cluster's state. * </pre> * * <code>.google.cloud.dataproc.v1beta2.ClusterStatus.State state = 1;</code> */ public int getStateValue() { return state_; } /** * * * <pre> * Output only. The cluster's state. * </pre> * * <code>.google.cloud.dataproc.v1beta2.ClusterStatus.State state = 1;</code> */ public Builder setStateValue(int value) { state_ = value; onChanged(); return this; } /** * * * <pre> * Output only. The cluster's state. * </pre> * * <code>.google.cloud.dataproc.v1beta2.ClusterStatus.State state = 1;</code> */ public com.google.cloud.dataproc.v1beta2.ClusterStatus.State getState() { @SuppressWarnings("deprecation") com.google.cloud.dataproc.v1beta2.ClusterStatus.State result = com.google.cloud.dataproc.v1beta2.ClusterStatus.State.valueOf(state_); return result == null ? com.google.cloud.dataproc.v1beta2.ClusterStatus.State.UNRECOGNIZED : result; } /** * * * <pre> * Output only. The cluster's state. * </pre> * * <code>.google.cloud.dataproc.v1beta2.ClusterStatus.State state = 1;</code> */ public Builder setState(com.google.cloud.dataproc.v1beta2.ClusterStatus.State value) { if (value == null) { throw new NullPointerException(); } state_ = value.getNumber(); onChanged(); return this; } /** * * * <pre> * Output only. The cluster's state. * </pre> * * <code>.google.cloud.dataproc.v1beta2.ClusterStatus.State state = 1;</code> */ public Builder clearState() { state_ = 0; onChanged(); return this; } private java.lang.Object detail_ = ""; /** * * * <pre> * Output only. Optional details of cluster's state. * </pre> * * <code>string detail = 2;</code> */ public java.lang.String getDetail() { java.lang.Object ref = detail_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); detail_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Output only. 
Optional details of cluster's state. * </pre> * * <code>string detail = 2;</code> */ public com.google.protobuf.ByteString getDetailBytes() { java.lang.Object ref = detail_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); detail_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Output only. Optional details of cluster's state. * </pre> * * <code>string detail = 2;</code> */ public Builder setDetail(java.lang.String value) { if (value == null) { throw new NullPointerException(); } detail_ = value; onChanged(); return this; } /** * * * <pre> * Output only. Optional details of cluster's state. * </pre> * * <code>string detail = 2;</code> */ public Builder clearDetail() { detail_ = getDefaultInstance().getDetail(); onChanged(); return this; } /** * * * <pre> * Output only. Optional details of cluster's state. * </pre> * * <code>string detail = 2;</code> */ public Builder setDetailBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); detail_ = value; onChanged(); return this; } private com.google.protobuf.Timestamp stateStartTime_ = null; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> stateStartTimeBuilder_; /** * * * <pre> * Output only. Time when this state was entered. * </pre> * * <code>.google.protobuf.Timestamp state_start_time = 3;</code> */ public boolean hasStateStartTime() { return stateStartTimeBuilder_ != null || stateStartTime_ != null; } /** * * * <pre> * Output only. Time when this state was entered. * </pre> * * <code>.google.protobuf.Timestamp state_start_time = 3;</code> */ public com.google.protobuf.Timestamp getStateStartTime() { if (stateStartTimeBuilder_ == null) { return stateStartTime_ == null ? 
com.google.protobuf.Timestamp.getDefaultInstance() : stateStartTime_; } else { return stateStartTimeBuilder_.getMessage(); } } /** * * * <pre> * Output only. Time when this state was entered. * </pre> * * <code>.google.protobuf.Timestamp state_start_time = 3;</code> */ public Builder setStateStartTime(com.google.protobuf.Timestamp value) { if (stateStartTimeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } stateStartTime_ = value; onChanged(); } else { stateStartTimeBuilder_.setMessage(value); } return this; } /** * * * <pre> * Output only. Time when this state was entered. * </pre> * * <code>.google.protobuf.Timestamp state_start_time = 3;</code> */ public Builder setStateStartTime(com.google.protobuf.Timestamp.Builder builderForValue) { if (stateStartTimeBuilder_ == null) { stateStartTime_ = builderForValue.build(); onChanged(); } else { stateStartTimeBuilder_.setMessage(builderForValue.build()); } return this; } /** * * * <pre> * Output only. Time when this state was entered. * </pre> * * <code>.google.protobuf.Timestamp state_start_time = 3;</code> */ public Builder mergeStateStartTime(com.google.protobuf.Timestamp value) { if (stateStartTimeBuilder_ == null) { if (stateStartTime_ != null) { stateStartTime_ = com.google.protobuf.Timestamp.newBuilder(stateStartTime_) .mergeFrom(value) .buildPartial(); } else { stateStartTime_ = value; } onChanged(); } else { stateStartTimeBuilder_.mergeFrom(value); } return this; } /** * * * <pre> * Output only. Time when this state was entered. * </pre> * * <code>.google.protobuf.Timestamp state_start_time = 3;</code> */ public Builder clearStateStartTime() { if (stateStartTimeBuilder_ == null) { stateStartTime_ = null; onChanged(); } else { stateStartTime_ = null; stateStartTimeBuilder_ = null; } return this; } /** * * * <pre> * Output only. Time when this state was entered. 
* </pre> * * <code>.google.protobuf.Timestamp state_start_time = 3;</code> */ public com.google.protobuf.Timestamp.Builder getStateStartTimeBuilder() { onChanged(); return getStateStartTimeFieldBuilder().getBuilder(); } /** * * * <pre> * Output only. Time when this state was entered. * </pre> * * <code>.google.protobuf.Timestamp state_start_time = 3;</code> */ public com.google.protobuf.TimestampOrBuilder getStateStartTimeOrBuilder() { if (stateStartTimeBuilder_ != null) { return stateStartTimeBuilder_.getMessageOrBuilder(); } else { return stateStartTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : stateStartTime_; } } /** * * * <pre> * Output only. Time when this state was entered. * </pre> * * <code>.google.protobuf.Timestamp state_start_time = 3;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> getStateStartTimeFieldBuilder() { if (stateStartTimeBuilder_ == null) { stateStartTimeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder>( getStateStartTime(), getParentForChildren(), isClean()); stateStartTime_ = null; } return stateStartTimeBuilder_; } private int substate_ = 0; /** * * * <pre> * Output only. Additional state information that includes * status reported by the agent. * </pre> * * <code>.google.cloud.dataproc.v1beta2.ClusterStatus.Substate substate = 4;</code> */ public int getSubstateValue() { return substate_; } /** * * * <pre> * Output only. Additional state information that includes * status reported by the agent. * </pre> * * <code>.google.cloud.dataproc.v1beta2.ClusterStatus.Substate substate = 4;</code> */ public Builder setSubstateValue(int value) { substate_ = value; onChanged(); return this; } /** * * * <pre> * Output only. 
Additional state information that includes * status reported by the agent. * </pre> * * <code>.google.cloud.dataproc.v1beta2.ClusterStatus.Substate substate = 4;</code> */ public com.google.cloud.dataproc.v1beta2.ClusterStatus.Substate getSubstate() { @SuppressWarnings("deprecation") com.google.cloud.dataproc.v1beta2.ClusterStatus.Substate result = com.google.cloud.dataproc.v1beta2.ClusterStatus.Substate.valueOf(substate_); return result == null ? com.google.cloud.dataproc.v1beta2.ClusterStatus.Substate.UNRECOGNIZED : result; } /** * * * <pre> * Output only. Additional state information that includes * status reported by the agent. * </pre> * * <code>.google.cloud.dataproc.v1beta2.ClusterStatus.Substate substate = 4;</code> */ public Builder setSubstate(com.google.cloud.dataproc.v1beta2.ClusterStatus.Substate value) { if (value == null) { throw new NullPointerException(); } substate_ = value.getNumber(); onChanged(); return this; } /** * * * <pre> * Output only. Additional state information that includes * status reported by the agent. 
* </pre> * * <code>.google.cloud.dataproc.v1beta2.ClusterStatus.Substate substate = 4;</code> */ public Builder clearSubstate() { substate_ = 0; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFieldsProto3(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.dataproc.v1beta2.ClusterStatus) } // @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.ClusterStatus) private static final com.google.cloud.dataproc.v1beta2.ClusterStatus DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.dataproc.v1beta2.ClusterStatus(); } public static com.google.cloud.dataproc.v1beta2.ClusterStatus getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ClusterStatus> PARSER = new com.google.protobuf.AbstractParser<ClusterStatus>() { @java.lang.Override public ClusterStatus parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new ClusterStatus(input, extensionRegistry); } }; public static com.google.protobuf.Parser<ClusterStatus> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ClusterStatus> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.dataproc.v1beta2.ClusterStatus getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache-2.0
goodev/android-discourse
discourse2/src/main/java/org/goodev/discourse/ui/EditorChangeTitleFragment.java
9269
package org.goodev.discourse.ui; import android.app.Activity; import android.app.AlertDialog; import android.app.Dialog; import android.content.Context; import android.content.DialogInterface; import android.database.Cursor; import android.os.Bundle; import android.support.v4.app.DialogFragment; import android.support.v4.app.Fragment; import android.support.v4.app.LoaderManager.LoaderCallbacks; import android.support.v4.content.CursorLoader; import android.support.v4.content.Loader; import android.support.v4.widget.CursorAdapter; import android.text.TextUtils; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.EditText; import android.widget.Spinner; import android.widget.TextView; import org.goodev.discourse.R; import org.goodev.discourse.contentprovider.Provider; import org.goodev.discourse.utils.Utils; import static org.goodev.discourse.database.tables.CategoriesTable.COLOR; import static org.goodev.discourse.database.tables.CategoriesTable.DESCRIPTION; import static org.goodev.discourse.database.tables.CategoriesTable.DESCRIPTION_EXCERPT; import static org.goodev.discourse.database.tables.CategoriesTable.ID; import static org.goodev.discourse.database.tables.CategoriesTable.NAME; import static org.goodev.discourse.database.tables.CategoriesTable.SLUG; import static org.goodev.discourse.database.tables.CategoriesTable.TEXT_COLOR; import static org.goodev.discourse.database.tables.CategoriesTable.TOPIC_COUNT; import static org.goodev.discourse.database.tables.CategoriesTable.UID; public class EditorChangeTitleFragment extends DialogFragment implements LoaderCallbacks<Cursor> { private static final int LOADER_ID_CATEGORY = 0; private static final String[] CATEGORY_COLUMNS = new String[]{ID, UID, NAME, COLOR, TEXT_COLOR, SLUG, TOPIC_COUNT, DESCRIPTION, DESCRIPTION_EXCERPT}; private static final int INDEX_UID = 1; private static final int INDEX_NAME = 2; private static final int INDEX_COLOR = 3; private 
static final int INDEX_TEXT_COLOR = 4; private static final int INDEX_SLUG = 5; private static final int INDEX_TOPIC_COUNT = 6; private ChangeTopicListener mListener; private EditText mTitleET; private int mCategoryIndex; private long mCategoryId; private String mTitle; private Spinner mCategoriesSpinner; private CursorAdapter mCategoryAdapter; @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); mCategoryAdapter = new CategoryAdapter(getActivity(), null); if (savedInstanceState != null) { mTitle = savedInstanceState.getString(Utils.EXTRA_TITLE, null); mCategoryIndex = savedInstanceState.getInt(Utils.EXTRA_CAT_INDEX, 0); } else { Bundle args = getArguments(); mTitle = args.getString(Utils.EXTRA_TITLE, null); mCategoryId = args.getLong(Utils.EXTRA_ID, 0); } } @Override public void onSaveInstanceState(Bundle outState) { super.onSaveInstanceState(outState); String title = mTitleET.getText().toString(); if (!TextUtils.isEmpty(title)) { outState.putString(Utils.EXTRA_TITLE, title); } int index = mCategoriesSpinner.getSelectedItemPosition(); outState.putInt(Utils.EXTRA_CAT_INDEX, index); } @Override public void onAttach(Activity activity) { super.onAttach(activity); Fragment f = getParentFragment(); if (activity instanceof ChangeTopicListener) { mListener = (ChangeTopicListener) activity; } else if (f instanceof ChangeTopicListener) { mListener = (ChangeTopicListener) f; } else { throw new ClassCastException(activity.toString() + " must implement ChangeTopicListener"); } } @Override public void onActivityCreated(Bundle savedInstanceState) { super.onActivityCreated(savedInstanceState); getLoaderManager().initLoader(LOADER_ID_CATEGORY, null, this); } @Override public Dialog onCreateDialog(Bundle savedInstanceState) { AlertDialog.Builder builder = new AlertDialog.Builder(getActivity()); LayoutInflater inflater = getActivity().getLayoutInflater(); View view = inflater.inflate(R.layout.editor_change_title, null); mTitleET = 
(EditText) view.findViewById(R.id.edit_title); mTitleET.setText(mTitle); mCategoriesSpinner = (Spinner) view.findViewById(R.id.edit_categories_spinner); mCategoriesSpinner.setAdapter(mCategoryAdapter); builder.setView(view).setTitle(R.string.edit_topic_title).setPositiveButton(R.string.save, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int id) { String name = mTitleET.getText().toString().trim(); int position = mCategoriesSpinner.getSelectedItemPosition(); String catName = ""; long catId = 0; if (position != 0) { Cursor c = (Cursor) mCategoriesSpinner.getSelectedItem(); if (c != null) { catName = c.getString(INDEX_NAME); catId = c.getLong(INDEX_UID); } } if (mListener != null) { mListener.onTopicChange(name, catName, catId); } } }).setNegativeButton(R.string.cancel, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int id) { dialog.cancel(); } }); return builder.create(); } @Override public Loader<Cursor> onCreateLoader(int id, Bundle args) { return new CursorLoader(getActivity(), Provider.CATEGORIES_CONTENT_URI, CATEGORY_COLUMNS, null, null, UID + " ASC"); } @Override public void onLoadFinished(Loader<Cursor> loader, Cursor data) { mCategoryAdapter.swapCursor(data); if (mCategoryId > 0) { data.moveToFirst(); int index = 0; while (!data.isAfterLast()) { long id = data.getLong(INDEX_UID); if (id == mCategoryId) { mCategoryIndex = index; break; } data.moveToNext(); index++; } } mCategoriesSpinner.setSelection(mCategoryIndex); } @Override public void onLoaderReset(Loader<Cursor> loader) { } public interface ChangeTopicListener { void onTopicChange(String title, String category, long categoryId); } class CategoryAdapter extends CursorAdapter { LayoutInflater mLayoutInflater; Context mContext; public CategoryAdapter(Context context, Cursor c) { super(context, c, false); mContext = context; mLayoutInflater = LayoutInflater.from(context); } @Override public View newView(Context 
context, Cursor cursor, ViewGroup parent) { return mLayoutInflater.inflate(R.layout.editor_category_item, parent, false); } @Override public void bindView(View view, Context context, Cursor cursor) { TextView category = (TextView) view.findViewById(R.id.category_name); String name = cursor.getString(INDEX_NAME); category.setText(name); } public void bindDropDownView(View view, Context context, Cursor cursor) { TextView category = (TextView) view.findViewById(R.id.category_name); TextView topicCount = (TextView) view.findViewById(R.id.category_topic_count); String name = cursor.getString(INDEX_NAME); String bgColor = cursor.getString(INDEX_COLOR); String textColor = cursor.getString(INDEX_TEXT_COLOR); Utils.setCategoryView(category, name, bgColor, textColor); long count = cursor.getLong(INDEX_TOPIC_COUNT); topicCount.setText(mContext.getString(R.string.editor_category_count, count)); } @Override public View getDropDownView(int position, View convertView, ViewGroup parent) { if (mDataValid) { mCursor.moveToPosition(position); View v; if (convertView == null) { v = newDropDownView(mContext, mCursor, parent); } else { v = convertView; } bindDropDownView(v, mContext, mCursor); return v; } else { return null; } } @Override public View newDropDownView(Context context, Cursor cursor, ViewGroup parent) { return mLayoutInflater.inflate(R.layout.editor_category_dropdown_item, parent, false); } } }
apache-2.0
alexeremeev/aeremeev
chapter_003(IO)/src/main/java/ru/job4j/io/chat/ConsoleIO.java
1508
package ru.job4j.io.chat; import java.io.BufferedReader; import java.io.IOException; import java.io.PrintStream; /** * class ConsoleIO - ввод/вывод с помощью консоли. */ public class ConsoleIO implements InputOutput { /** * BufferedReader. */ private BufferedReader reader; /** * PrintStream. */ private PrintStream stream; /** * Конструктор. * @param reader BufferedReader. * @param stream PrintStream. */ public ConsoleIO(BufferedReader reader, PrintStream stream) { this.reader = reader; this.stream = stream; } /** * Считывает строку, введенную пользователем. * @return строка, введенная пользователем. */ @Override public String read() { String result = ""; try { result = this.reader.readLine(); } catch (IOException ioe) { ioe.printStackTrace(); } return result; } /** * Вывести сообщение с переводом на новую строку. * @param message сообщение. */ @Override public void println(String message) { this.stream.println(message); } /** * Вывести сообщение. * @param message сообщение. */ @Override public void print(String message) { this.stream.print(message); } }
apache-2.0
heinousjay/JibbrJabbr
kernel/src/test/java/jj/resource/ResourceEventMaker.java
937
/* * Copyright 2012 Jason Miller * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package jj.resource; /** * @author jason * */ public enum ResourceEventMaker { ; public static ResourceKilled makeResourceKilled(AbstractResource<?> resource) { return new ResourceKilled(resource); } public static ResourceLoaded makeResourceLoaded(AbstractResource<?> resource) { return new ResourceLoaded(resource); } }
apache-2.0
stumoodie/CompoundGraph
src/uk/ac/ed/inf/graph/compound/ICompoundGraphElementFactory.java
1124
/* Licensed to the Court of the University of Edinburgh (UofE) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The UofE licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package uk.ac.ed.inf.graph.compound; public interface ICompoundGraphElementFactory { void setParent(ICompoundGraphElement parent); void setIndex(int nodeIndex); void setAttribute(IElementAttribute newAttribute); ICompoundEdge createEdge(ICompoundNode outNode, ICompoundNode inNode); ICompoundNode createNode(); }
apache-2.0
droidranger/xygapp
app/src/main/java/com/ranger/xyg/demos/dbean/Course.java
408
package com.ranger.xyg.demos.dbean; /** * Created by xyg on 2017/5/23. */ public class Course { private String name;//课程名 private String id; public String getName() { return name; } public void setName(String name) { this.name = name; } public String getId() { return id; } public void setId(String id) { this.id = id; } }
apache-2.0
Entarolex/java_training
mantis_tests/src/test/java/ru/stqa/pft/mantis/tests/SoapTests.java
1786
package ru.stqa.pft.mantis.tests; import org.testng.annotations.Test; import ru.stqa.pft.mantis.model.Issue; import ru.stqa.pft.mantis.model.Project; import javax.xml.rpc.ServiceException; import java.net.MalformedURLException; import java.rmi.RemoteException; import java.util.Set; import static org.testng.Assert.assertEquals; /** * Created by a.molodkin on 25.04.2016. */ public class SoapTests extends TestBase{ //@Test public void testGetProjects() throws MalformedURLException, ServiceException, RemoteException { Set<Project> projects = app.soap().getProjects(); System.out.println(projects.size()); for (Project project : projects) { System.out.println(project.getName()); } } //@Test public void testCreateIssue() throws MalformedURLException, ServiceException, RemoteException { Set<Project> projects = app.soap().getProjects(); Issue issue = new Issue().withSummary("Test issue") .withDescription("Test issue description").withProject(projects.iterator().next()); Issue created = app.soap().addIssue(issue); assertEquals(issue.getSummary(), created.getSummary()); } @Test public void testGetIssues() throws RemoteException, ServiceException, MalformedURLException { Set<Project> projects = app.soap().getProjects(); Set<Issue> issues = app.soap().getIssues(projects.iterator().next()); System.out.println("Count of Issues : "+ app.soap().getIssues(projects.iterator().next()).size()); for (Issue iss : issues) { System.out.println("Issue Id :"+iss.getId()); System.out.println("status: "+iss.getStatus().getName()); //System.out.println("resolution: "+iss.getResolution().getName()); skipIfNotFixed(iss.getId()); System.out.println("---\n"); } } }
apache-2.0
balajiboggaram/algorithms
src/me/learn/personal/month5/WiggleSortZigZag.java
2937
/** * */ package me.learn.personal.month5; import java.util.ArrayList; import java.util.List; import java.util.PriorityQueue; /** * Title 324 : * * Date : Jan 8, 2021 * * @author bramanarayan * */ public class WiggleSortZigZag { /** * @param args */ public static void main(String[] args) { // TODO Auto-generated method stub } // a[i-1] < a[i] > a[i+1] < a[i+2] > a[i+3] // just ensure the even indexed element is always greater than its neighboring // odd elements. // if not swap them // Does not work for duplicates public void wiggleSortNoDuplicates(int[] nums) { int n = nums.length; for (int i = 0; i < nums.length; i += 2) { if (i > 0 && nums[i - 1] > nums[i]) // if left odd is greater than my current(even) swap(nums, i, i - 1); if (i < n && nums[i + 1] > nums[i]) { swap(nums, i, i + 1); } } } private void swap(int[] nums, int i, int j) { int temp = nums[i]; nums[i] = nums[j]; nums[j] = temp; } // Works With duplicates // get the median - kth largest (depending on odd or even) // put the elements in to parts (lesss than median) and (greater than median) // now do a merge from right to left public void wiggleSort(int[] nums) { int n = nums.length; // int median = selectKth(nums, 0, nums.length - 1, nums.length % 2 == 0 ? // nums.length / 2 : nums.length / 2 + 1); int K = n % 2 == 0 ? 
(n / 2) : (n / 2 + 1); int median = KthLargest(K, nums); List<Integer> leftArr = new ArrayList(); for (int i = 0; i <= median; i++) leftArr.add(nums[i]); List<Integer> rightArr = new ArrayList(); for (int i = median + 1; i < nums.length; i++) rightArr.add(nums[i]); int left = leftArr.size() - 1; int right = rightArr.size() - 1; int kindex = 0; while (left >= 0 && right >= 0) { nums[kindex] = leftArr.get(left); nums[kindex + 1] = rightArr.get(right); left--; right--; kindex = kindex + 2; } if (nums.length % 2 != 0) nums[nums.length - 1] = leftArr.get(0); } PriorityQueue<Integer> pq = new PriorityQueue<Integer>(); private int KthLargest(int k, int[] nums) { for (int i = 0; i < nums.length; i++) { pq.add(nums[i]); if (pq.size() > k) { pq.remove(); } } int element = pq.peek(); return element; } private int selectKth(int[] nums, int start, int end, int k) { int[] res = partition(nums, start, end); int lb = res[0]; int hb = res[1]; if (k - 1 < lb) return selectKth(nums, start, lb - 1, k); else if (k - 1 > hb) return selectKth(nums, hb + 1, end, k); else return k - 1; } private int[] partition(int[] nums, int lb, int hb) { int pVal = nums[lb]; // use random genarater is better in performance int i = lb; while (i <= hb) { if (nums[i] == pVal) i++; else if (nums[i] < pVal) swap(nums, i++, lb++); else swap(nums, i, hb--); } int[] res = new int[2]; res[0] = lb; res[1] = hb; return res; } }
apache-2.0
jentfoo/aws-sdk-java
aws-java-sdk-cloudhsmv2/src/main/java/com/amazonaws/services/cloudhsmv2/model/transform/RestoreBackupRequestMarshaller.java
2003
/* * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.cloudhsmv2.model.transform; import javax.annotation.Generated; import com.amazonaws.SdkClientException; import com.amazonaws.services.cloudhsmv2.model.*; import com.amazonaws.protocol.*; import com.amazonaws.annotation.SdkInternalApi; /** * RestoreBackupRequestMarshaller */ @Generated("com.amazonaws:aws-java-sdk-code-generator") @SdkInternalApi public class RestoreBackupRequestMarshaller { private static final MarshallingInfo<String> BACKUPID_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PAYLOAD) .marshallLocationName("BackupId").build(); private static final RestoreBackupRequestMarshaller instance = new RestoreBackupRequestMarshaller(); public static RestoreBackupRequestMarshaller getInstance() { return instance; } /** * Marshall the given parameter object. */ public void marshall(RestoreBackupRequest restoreBackupRequest, ProtocolMarshaller protocolMarshaller) { if (restoreBackupRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(restoreBackupRequest.getBackupId(), BACKUPID_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
apache-2.0
sdcuike/algorithm-2015
src/main/java/com/doctor/interview/FindDuplicateElementsInAnArray.java
570
package com.doctor.interview; import java.util.HashSet; import java.util.Set; /** * @see http://howtodoinjava.com/2015/03/04/find-duplicate-elements-in-an-array/ * * @author doctor * * @time 2015年5月2日 上午12:28:48 */ public class FindDuplicateElementsInAnArray { public static void main(String[] args) { int[] array = { 1, 1, 2, 3, 4, 5, 6, 7, 8, 8 }; Set<Integer> set = new HashSet<>(); for (int i = 0; i < array.length; i++) { if (set.add(array[i]) == false) { System.out.println("Duplicate element found :" + array[i]); } } } }
apache-2.0
ClarisseSan/xyzreader
XYZReader/src/main/java/com/example/xyzreader/ui/DynamicHeightNetworkImageView.java
1590
/* * Copyright 2016 Angela Sanchez Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. * */ package com.example.xyzreader.ui; import android.content.Context; import android.util.AttributeSet; import com.android.volley.toolbox.NetworkImageView; public class DynamicHeightNetworkImageView extends NetworkImageView { private float mAspectRatio = 1.5f; public DynamicHeightNetworkImageView(Context context) { super(context); } public DynamicHeightNetworkImageView(Context context, AttributeSet attrs) { super(context, attrs); } public DynamicHeightNetworkImageView(Context context, AttributeSet attrs, int defStyle) { super(context, attrs, defStyle); } public void setAspectRatio(float aspectRatio) { mAspectRatio = aspectRatio; requestLayout(); } @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { super.onMeasure(widthMeasureSpec, heightMeasureSpec); int measuredWidth = getMeasuredWidth(); setMeasuredDimension(measuredWidth, (int) (measuredWidth / mAspectRatio)); } }
apache-2.0
benjchristensen/RxJava
src/test/java/io/reactivex/internal/operators/observable/BlockingObservableToIteratorTest.java
3835
/** * Copyright 2016 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in * compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See * the License for the specific language governing permissions and limitations under the License. */ package io.reactivex.internal.operators.observable; import static org.junit.Assert.*; import java.util.*; import org.junit.*; import io.reactivex.Observable; import io.reactivex.ObservableSource; import io.reactivex.Observer; import io.reactivex.disposables.Disposables; import io.reactivex.exceptions.TestException; import io.reactivex.internal.operators.observable.BlockingObservableIterable.BlockingObservableIterator; public class BlockingObservableToIteratorTest { @Test public void testToIterator() { Observable<String> obs = Observable.just("one", "two", "three"); Iterator<String> it = obs.blockingIterable().iterator(); assertEquals(true, it.hasNext()); assertEquals("one", it.next()); assertEquals(true, it.hasNext()); assertEquals("two", it.next()); assertEquals(true, it.hasNext()); assertEquals("three", it.next()); assertEquals(false, it.hasNext()); } @Test(expected = TestException.class) public void testToIteratorWithException() { Observable<String> obs = Observable.unsafeCreate(new ObservableSource<String>() { @Override public void subscribe(Observer<? 
super String> observer) { observer.onSubscribe(Disposables.empty()); observer.onNext("one"); observer.onError(new TestException()); } }); Iterator<String> it = obs.blockingIterable().iterator(); assertEquals(true, it.hasNext()); assertEquals("one", it.next()); assertEquals(true, it.hasNext()); it.next(); } @Ignore("subscribe() should not throw") @Test(expected = TestException.class) public void testExceptionThrownFromOnSubscribe() { Iterable<String> strings = Observable.unsafeCreate(new ObservableSource<String>() { @Override public void subscribe(Observer<? super String> observer) { throw new TestException("intentional"); } }).blockingIterable(); for (String string : strings) { // never reaches here System.out.println(string); } } @Test public void dispose() { BlockingObservableIterator<Integer> it = new BlockingObservableIterator<Integer>(128); assertFalse(it.isDisposed()); it.dispose(); assertTrue(it.isDisposed()); } @Test public void interruptWait() { BlockingObservableIterator<Integer> it = new BlockingObservableIterator<Integer>(128); try { Thread.currentThread().interrupt(); it.hasNext(); } catch (RuntimeException ex) { assertTrue(ex.toString(), ex.getCause() instanceof InterruptedException); } } @Test(expected = NoSuchElementException.class) public void emptyThrowsNoSuch() { BlockingObservableIterator<Integer> it = new BlockingObservableIterator<Integer>(128); it.onComplete(); it.next(); } @Test(expected = UnsupportedOperationException.class) public void remove() { BlockingObservableIterator<Integer> it = new BlockingObservableIterator<Integer>(128); it.remove(); } }
apache-2.0
Sp2000/colplus-backend
colplus-api/src/test/java/life/catalogue/api/RandomInstance.java
5573
package life.catalogue.api; import java.lang.reflect.Array; import java.lang.reflect.Field; import java.lang.reflect.Modifier; import java.time.LocalDate; import java.time.LocalDateTime; import java.util.*; import org.apache.commons.lang3.StringUtils; public class RandomInstance { private static final Random RND = new Random(); // Upper bounds for randomized values private int maxInt = 100; private int maxStringLength = 10; private int maxArrayLength = 5; /* * Whether or not to set "empty" strings/arrays/collections to null. MUST be true in combination * with JsonInclude.Include.NON_EMPTY !! Otherwise SerDe tests will arbitrarily fail or succeed. */ private boolean emptyToNull = true; /* * Initializes commonly-typed fields (String, int, ...) of an instance of type T. */ @SuppressWarnings({"unchecked"}) public <T> void populate(T instance) { Class<T> c = (Class<T>) instance.getClass(); try { for (Field f : getFields(c)) { setCommonTypes(instance, f); } } catch (IllegalAccessException e) { throw new RuntimeException(e); } } /* * Instantiate and initialize commonly-typed fields. Only works if c has a no-arg constructor. * Fields whose type is in the extraTypes array will also be initialized (again requires a no-arg * constructor for that type). */ public Object create(Class<?> c, Class<?>... extraTypes) { Set<Class<?>> set = new HashSet<>(Arrays.asList(extraTypes)); try { Object instance = c.newInstance(); for (Field f : getFields(c)) { if (setCommonTypes(instance, f)) { continue; } Class<?> t = f.getType(); if (set.contains(t)) { f.set(instance, create(t, extraTypes)); } else if (t.isArray() && set.contains(t.getComponentType())) { f.set(instance, createArray(t.getComponentType(), extraTypes)); } } return instance; } catch (InstantiationException | IllegalAccessException e) { throw new RuntimeException(e); } } public List<?> createList(Class<?> c, Class<?>... extraTypes) { int size = RND.nextInt(maxArrayLength + 1); if (size == 0) { return emptyToNull ? 
null : Collections.emptyList(); } List<Object> list = new ArrayList<>(size); for (int i = 0; i < size; i++) { list.add(create(c, extraTypes)); } return list; } private Object[] createArray(Class<?> c, Class<?>... extraTypes) { int size = RND.nextInt(maxArrayLength + 1); Object[] arr = (Object[]) Array.newInstance(c, size); if (size == 0) { return emptyToNull ? null : arr; } for (int i = 0; i < size; i++) { arr[i] = create(c, extraTypes); } return arr; } public void setMaxInt(int maxInt) { this.maxInt = maxInt; } public void setMaxStringLength(int maxStringLength) { this.maxStringLength = maxStringLength; } public void setMaxArrayLength(int maxArrayLength) { this.maxArrayLength = maxArrayLength; } public void setEmptyToNull(boolean emptyToNull) { this.emptyToNull = emptyToNull; } private <T> boolean setCommonTypes(T instance, Field f) throws IllegalAccessException { Class<?> t = f.getType(); if (t == String.class) { f.set(instance, randomString()); return true; } else if (t == String[].class) { String[] strings = new String[RND.nextInt(maxArrayLength + 1)]; for (int i = 0; i < strings.length; i++) { strings[i] = randomString(); f.set(instance, strings); } return true; } else if (t == int.class) { f.setInt(instance, RND.nextInt(maxInt + 1)); return true; } else if (t == Integer.class) { f.set(instance, RND.nextInt(maxInt + 1)); return true; } else if (t == boolean.class) { f.setBoolean(instance, randomBoolean()); return true; } else if (t == Boolean.class) { f.set(instance, randomBoolean()); return true; } else if (t.isEnum()) { Class<Enum<?>> enumClass = (Class<Enum<?>>) t; Enum<?>[] values = enumClass.getEnumConstants(); int idx = RND.nextInt(values.length); f.set(instance, values[idx]); return true; } else if (t == LocalDateTime.class) { f.set(instance, randomDateTime()); return true; } else if (t == LocalDate.class) { f.set(instance, randomDateTime().toLocalDate()); return true; } return false; } private LocalDateTime randomDateTime() { return 
LocalDateTime.now(); } private String randomString() { int len = RND.nextInt(maxStringLength + 1); if (len == 0) { return emptyToNull || randomBoolean() ? null : StringUtils.EMPTY; } return RandomUtils.randomLatinString(len); } private static boolean randomBoolean() { return RND.nextInt(2) == 1; } private static ArrayList<Field> getFields(Class<?> cls) { ArrayList<Class<?>> hierarchy = new ArrayList<>(4); Class<?> c = cls; while (c != Object.class) { hierarchy.add(c); c = c.getSuperclass(); } ArrayList<Field> allFields = new ArrayList<>(); for (int i = hierarchy.size() - 1; i >= 0; i--) { c = hierarchy.get(i); Field[] fields = c.getDeclaredFields(); for (Field f : fields) { if (Modifier.isStatic(f.getModifiers())) { continue; } if (!f.isAccessible()) { f.setAccessible(true); } allFields.add(f); } } return allFields; } }
apache-2.0
grails/grails-gdoc-engine
src/main/java/org/radeox/macro/parameter/BaseMacroParameter.java
4055
/* * Copyright 2001-2004 Fraunhofer Gesellschaft, Munich, Germany, for its * Fraunhofer Institute Computer Architecture and Software Technology * (FIRST), Berlin, Germany * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.radeox.macro.parameter; import org.radeox.api.engine.context.RenderContext; import java.util.HashMap; import java.util.Map; import java.util.StringTokenizer; /** * * @author * @version $Id: BaseMacroParameter.java,v 1.12 2004/05/03 11:12:37 stephan Exp $ */ public class BaseMacroParameter implements MacroParameter { private String content; protected Map params; private int size; protected RenderContext context; private int start; private int end; private int contentStart; private int contentEnd; public BaseMacroParameter() { } public BaseMacroParameter(RenderContext context) { this.context = context; } public void setParams(String stringParams) { params = split(stringParams, "|"); size = params.size(); } public RenderContext getContext() { return context; } public Map getParams() { return params; } public String getContent() { return content; } public void setContent(String content) { this.content = content; } public int getLength() { return size; } public String get(String index, int idx) { String result = get(index); if (result == null) { result = get(idx); } return result; } public String get(String index) { return (String) params.get(index); } public String get(int index) { return get("" + index); } /** * * Splits a String on a delimiter to a 
List. The function works like * the perl-function split. * * @param aString a String to split * @param delimiter a delimiter dividing the entries * @return a Array of splittet Strings */ public Map split(String aString, String delimiter) { Map result = new HashMap(); if (null != aString) { StringTokenizer st = new StringTokenizer(aString, delimiter); int i = 0; while (st.hasMoreTokens()) { String value = st.nextToken(); String key = "" + i; if (value.indexOf("=") != -1) { result.put(key, insertValue(value)); int index = value.indexOf("="); key = value.substring(0, index); value = value.substring(index + 1); result.put(key, insertValue(value)); } else { result.put(key, insertValue(value)); } i++; } } return result; } private String insertValue(String s) { int idx = s.indexOf('$'); if (idx != -1) { StringBuffer tmp = new StringBuffer(); Map globals = context.getParameters(); String var = s.substring(idx + 1); if (idx > 0) tmp.append(s.substring(0, idx)); if (globals.containsKey(var)) { tmp.append(globals.get(var)); } return tmp.toString(); } return s; } public void setStart(int start) { this.start = start; } public void setEnd(int end) { this.end = end; } public int getStart() { return this.start; } public int getEnd() { return this.end; } public int getContentStart() { return contentStart; } public void setContentStart(int contentStart) { this.contentStart = contentStart; } public int getContentEnd() { return contentEnd; } public void setContentEnd(int contentEnd) { this.contentEnd = contentEnd; } }
apache-2.0
hongyuhong/flink
flink-runtime/src/main/java/org/apache/flink/runtime/blob/BlobServer.java
14537
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.blob; import org.apache.flink.api.common.JobID; import org.apache.flink.configuration.BlobServerOptions; import org.apache.flink.configuration.Configuration; import org.apache.flink.runtime.jobmanager.HighAvailabilityMode; import org.apache.flink.runtime.net.SSLUtils; import org.apache.flink.util.ExceptionUtils; import org.apache.flink.util.FileUtils; import org.apache.flink.util.NetUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.net.ssl.SSLContext; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.net.InetSocketAddress; import java.net.ServerSocket; import java.net.URL; import java.util.ArrayList; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; import static org.apache.flink.util.Preconditions.checkArgument; import static org.apache.flink.util.Preconditions.checkNotNull; /** * This class implements the BLOB server. 
The BLOB server is responsible for listening for incoming requests and * spawning threads to handle these requests. Furthermore, it takes care of creating the directory structure to store * the BLOBs or temporarily cache them. */ public class BlobServer extends Thread implements BlobService { /** The log object used for debugging. */ private static final Logger LOG = LoggerFactory.getLogger(BlobServer.class); /** Counter to generate unique names for temporary files. */ private final AtomicInteger tempFileCounter = new AtomicInteger(0); /** The server socket listening for incoming connections. */ private final ServerSocket serverSocket; /** The SSL server context if ssl is enabled for the connections */ private SSLContext serverSSLContext = null; /** Blob Server configuration */ private final Configuration blobServiceConfiguration; /** Indicates whether a shutdown of server component has been requested. */ private final AtomicBoolean shutdownRequested = new AtomicBoolean(); /** Root directory for local file storage */ private final File storageDir; /** Blob store for distributed file storage, e.g. in HA */ private final BlobStore blobStore; /** Set of currently running threads */ private final Set<BlobServerConnection> activeConnections = new HashSet<>(); /** The maximum number of concurrent connections */ private final int maxConnections; /** Lock guarding concurrent file accesses */ private final ReadWriteLock readWriteLock; /** * Shutdown hook thread to ensure deletion of the storage directory (or <code>null</code> if * the configured high availability mode does not equal{@link HighAvailabilityMode#NONE}) */ private final Thread shutdownHook; /** * Instantiates a new BLOB server and binds it to a free network port. 
* * @param config Configuration to be used to instantiate the BlobServer * @param blobStore BlobStore to store blobs persistently * * @throws IOException * thrown if the BLOB server cannot bind to a free network port or if the * (local or distributed) file storage cannot be created or is not usable */ public BlobServer(Configuration config, BlobStore blobStore) throws IOException { this.blobServiceConfiguration = checkNotNull(config); this.blobStore = checkNotNull(blobStore); this.readWriteLock = new ReentrantReadWriteLock(); // configure and create the storage directory String storageDirectory = config.getString(BlobServerOptions.STORAGE_DIRECTORY); this.storageDir = BlobUtils.initStorageDirectory(storageDirectory); LOG.info("Created BLOB server storage directory {}", storageDir); // configure the maximum number of concurrent connections final int maxConnections = config.getInteger(BlobServerOptions.FETCH_CONCURRENT); if (maxConnections >= 1) { this.maxConnections = maxConnections; } else { LOG.warn("Invalid value for maximum connections in BLOB server: {}. Using default value of {}", maxConnections, BlobServerOptions.FETCH_CONCURRENT.defaultValue()); this.maxConnections = BlobServerOptions.FETCH_CONCURRENT.defaultValue(); } // configure the backlog of connections int backlog = config.getInteger(BlobServerOptions.FETCH_BACKLOG); if (backlog < 1) { LOG.warn("Invalid value for BLOB connection backlog: {}. 
Using default value of {}", backlog, BlobServerOptions.FETCH_BACKLOG.defaultValue()); backlog = BlobServerOptions.FETCH_BACKLOG.defaultValue(); } this.shutdownHook = BlobUtils.addShutdownHook(this, LOG); if (config.getBoolean(BlobServerOptions.SSL_ENABLED)) { try { serverSSLContext = SSLUtils.createSSLServerContext(config); } catch (Exception e) { throw new IOException("Failed to initialize SSLContext for the blob server", e); } } // ----------------------- start the server ------------------- String serverPortRange = config.getString(BlobServerOptions.PORT); Iterator<Integer> ports = NetUtils.getPortRangeFromString(serverPortRange); final int finalBacklog = backlog; ServerSocket socketAttempt = NetUtils.createSocketFromPorts(ports, new NetUtils.SocketFactory() { @Override public ServerSocket createSocket(int port) throws IOException { if (serverSSLContext == null) { return new ServerSocket(port, finalBacklog); } else { LOG.info("Enabling ssl for the blob server"); return serverSSLContext.getServerSocketFactory().createServerSocket(port, finalBacklog); } } }); if(socketAttempt == null) { throw new IOException("Unable to allocate socket for blob server in specified port range: "+serverPortRange); } else { SSLUtils.setSSLVerAndCipherSuites(socketAttempt, config); this.serverSocket = socketAttempt; } // start the server thread setName("BLOB Server listener at " + getPort()); setDaemon(true); start(); if (LOG.isInfoEnabled()) { LOG.info("Started BLOB server at {}:{} - max concurrent requests: {} - max backlog: {}", serverSocket.getInetAddress().getHostAddress(), getPort(), maxConnections, backlog); } } // -------------------------------------------------------------------------------------------- // Path Accessors // -------------------------------------------------------------------------------------------- /** * Returns a file handle to the file associated with the given blob key on the blob * server. 
* * <p><strong>This is only called from the {@link BlobServerConnection}</strong> * * @param key identifying the file * @return file handle to the file */ File getStorageLocation(BlobKey key) { return BlobUtils.getStorageLocation(storageDir, key); } /** * Returns a file handle to the file identified by the given jobID and key. * * <p><strong>This is only called from the {@link BlobServerConnection}</strong> * * @param jobID to which the file is associated * @param key to identify the file within the job context * @return file handle to the file */ File getStorageLocation(JobID jobID, String key) { return BlobUtils.getStorageLocation(storageDir, jobID, key); } /** * Method which deletes all files associated with the given jobID. * * <p><strong>This is only called from the {@link BlobServerConnection}</strong> * * @param jobID all files associated to this jobID will be deleted * @throws IOException */ void deleteJobDirectory(JobID jobID) throws IOException { BlobUtils.deleteJobDirectory(storageDir, jobID); } /** * Returns a temporary file inside the BLOB server's incoming directory. * * @return a temporary file inside the BLOB server's incoming directory */ File createTemporaryFilename() { return new File(BlobUtils.getIncomingDirectory(storageDir), String.format("temp-%08d", tempFileCounter.getAndIncrement())); } /** * Returns the blob store. 
*/ BlobStore getBlobStore() { return blobStore; } /** * Returns the lock used to guard file accesses */ public ReadWriteLock getReadWriteLock() { return readWriteLock; } @Override public void run() { try { while (!this.shutdownRequested.get()) { BlobServerConnection conn = new BlobServerConnection(serverSocket.accept(), this); try { synchronized (activeConnections) { while (activeConnections.size() >= maxConnections) { activeConnections.wait(2000); } activeConnections.add(conn); } conn.start(); conn = null; } finally { if (conn != null) { conn.close(); synchronized (activeConnections) { activeConnections.remove(conn); } } } } } catch (Throwable t) { if (!this.shutdownRequested.get()) { LOG.error("BLOB server stopped working. Shutting down", t); try { close(); } catch (Throwable closeThrowable) { LOG.error("Could not properly close the BlobServer.", closeThrowable); } } } } /** * Shuts down the BLOB server. */ @Override public void close() throws IOException { if (shutdownRequested.compareAndSet(false, true)) { Exception exception = null; try { this.serverSocket.close(); } catch (IOException ioe) { exception = ioe; } // wake the thread up, in case it is waiting on some operation interrupt(); try { join(); } catch (InterruptedException ie) { Thread.currentThread().interrupt(); LOG.debug("Error while waiting for this thread to die.", ie); } synchronized (activeConnections) { if (!activeConnections.isEmpty()) { for (BlobServerConnection conn : activeConnections) { LOG.debug("Shutting down connection {}.", conn.getName()); conn.close(); } activeConnections.clear(); } } // Clean up the storage directory try { FileUtils.deleteDirectory(storageDir); } catch (IOException e) { exception = ExceptionUtils.firstOrSuppressed(e, exception); } // Remove shutdown hook to prevent resource leaks, unless this is invoked by the // shutdown hook itself if (shutdownHook != null && shutdownHook != Thread.currentThread()) { try { Runtime.getRuntime().removeShutdownHook(shutdownHook); } 
catch (IllegalStateException e) { // race, JVM is in shutdown already, we can safely ignore this } catch (Throwable t) { LOG.warn("Exception while unregistering BLOB server's cleanup shutdown hook.", t); } } if(LOG.isInfoEnabled()) { LOG.info("Stopped BLOB server at {}:{}", serverSocket.getInetAddress().getHostAddress(), getPort()); } ExceptionUtils.tryRethrowIOException(exception); } } @Override public BlobClient createClient() throws IOException { return new BlobClient(new InetSocketAddress(serverSocket.getInetAddress(), getPort()), blobServiceConfiguration); } /** * Method which retrieves the URL of a file associated with a blob key. The blob server looks * the blob key up in its local storage. If the file exists, then the URL is returned. If the * file does not exist, then a FileNotFoundException is thrown. * * @param requiredBlob blob key associated with the requested file * @return URL of the file * @throws IOException */ @Override public URL getURL(BlobKey requiredBlob) throws IOException { checkArgument(requiredBlob != null, "BLOB key cannot be null."); final File localFile = BlobUtils.getStorageLocation(storageDir, requiredBlob); if (localFile.exists()) { return localFile.toURI().toURL(); } else { try { // Try the blob store blobStore.get(requiredBlob, localFile); } catch (Exception e) { throw new IOException("Failed to copy from blob store.", e); } if (localFile.exists()) { return localFile.toURI().toURL(); } else { throw new FileNotFoundException("Local file " + localFile + " does not exist " + "and failed to copy from blob store."); } } } /** * This method deletes the file associated to the blob key if it exists in the local storage * of the blob server. 
* * @param key associated with the file to be deleted * @throws IOException */ @Override public void delete(BlobKey key) throws IOException { final File localFile = BlobUtils.getStorageLocation(storageDir, key); readWriteLock.writeLock().lock(); try { if (!localFile.delete() && localFile.exists()) { LOG.warn("Failed to delete locally BLOB " + key + " at " + localFile.getAbsolutePath()); } blobStore.delete(key); } finally { readWriteLock.writeLock().unlock(); } } /** * Returns the port on which the server is listening. * * @return port on which the server is listening */ @Override public int getPort() { return this.serverSocket.getLocalPort(); } /** * Tests whether the BLOB server has been requested to shut down. * * @return True, if the server has been requested to shut down, false otherwise. */ public boolean isShutdown() { return this.shutdownRequested.get(); } /** * Access to the server socket, for testing */ ServerSocket getServerSocket() { return this.serverSocket; } void unregisterConnection(BlobServerConnection conn) { synchronized (activeConnections) { activeConnections.remove(conn); activeConnections.notifyAll(); } } /** * Returns all the current active connections in the BlobServer. * * @return the list of all the active in current BlobServer */ List<BlobServerConnection> getCurrentActiveConnections() { synchronized (activeConnections) { return new ArrayList<BlobServerConnection>(activeConnections); } } }
apache-2.0
pongasoft/kiwidoc
kiwidoc/com.pongasoft.kiwidoc.builder/src/main/java/com/pongasoft/kiwidoc/builder/serializer/model/PackageModelSerializer.java
4202
/* * Copyright (c) 2012 Yan Pujante * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.pongasoft.kiwidoc.builder.serializer.model; import com.pongasoft.kiwidoc.builder.serializer.CollectionSerializer; import com.pongasoft.kiwidoc.builder.serializer.Serializer; import com.pongasoft.kiwidoc.builder.serializer.SerializerException; import com.pongasoft.kiwidoc.model.ClassDefinitionModel; import com.pongasoft.kiwidoc.model.DocModel; import com.pongasoft.kiwidoc.model.PackageModel; import com.pongasoft.kiwidoc.model.resource.LibraryVersionResource; import com.pongasoft.kiwidoc.model.resource.PackageResource; import com.pongasoft.kiwidoc.model.resource.Resource; import java.util.HashMap; import java.util.Map; import static com.pongasoft.kiwidoc.builder.serializer.SerializerUtils.*; /** * @author yan@pongasoft.com */ public class PackageModelSerializer implements Serializer<PackageModel, Object> { public static class FPackageModel { public static final String access = "a"; public static final String library = "l"; public static final String info = "i"; public static final String name = "n"; public static final String classes = "c"; } private final Serializer<LibraryVersionResource, Object> _libraryResourceSerializer; private final Serializer<DocModel, Resource> _docSerializer; private final CollectionSerializer<ClassDefinitionModel, Object> _classDefinitionsSerializer; /** * Constructor */ public PackageModelSerializer(Serializer<LibraryVersionResource, Object> 
libraryResourceSerializer, Serializer<ClassDefinitionModel, Object> classDefinitionSerializer, Serializer<DocModel, Resource> docSerializer) { _libraryResourceSerializer = libraryResourceSerializer; _classDefinitionsSerializer = new CollectionSerializer<ClassDefinitionModel, Object>(classDefinitionSerializer); _docSerializer = docSerializer; } public Object serialize(PackageModel packageModel) throws SerializerException { if(packageModel == null) return null; Map<String, Object> content = new HashMap<String, Object>(); putOnce(content, FPackageModel.access, packageModel.getAccess()); putOnce(content, FPackageModel.library, _libraryResourceSerializer.serialize(packageModel.getResource().getLibraryVersionResource())); putOnce(content, FPackageModel.name, packageModel.getName()); putOnce(content, FPackageModel.info, _docSerializer.serialize(packageModel.getPackageInfo())); putOnce(content, FPackageModel.classes, _classDefinitionsSerializer.serialize(packageModel.getAllClasses())); return content; } public PackageModel deserialize(Object context, Object objectToDeserialize) throws SerializerException { if(objectToDeserialize == null) return null; Map<String, Object> content = (Map<String, Object>) objectToDeserialize; LibraryVersionResource libraryVersionResource = _libraryResourceSerializer.deserialize(context, req(content, FPackageModel.library)); String packageName = req(content, FPackageModel.name); return new PackageModel((Integer) req(content, FPackageModel.access), new PackageResource(libraryVersionResource, packageName), _docSerializer.deserialize(libraryVersionResource, opt(content, FPackageModel.info)), _classDefinitionsSerializer.deserialize(context, opt(content, FPackageModel.classes))); } }
apache-2.0
mojohaus/servicedocgen-maven-plugin
src/main/java/org/codehaus/mojo/servicedocgen/introspection/JException.java
1610
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.codehaus.mojo.servicedocgen.introspection; import net.sf.mmm.util.reflect.api.GenericType; import com.thoughtworks.qdox.model.JavaClass; /** * An exception declaration of a {@link JMethod}. * * @see JMethod#getExceptions() * @author hohwille */ public class JException extends JElement { /** * The constructor. * * @param byteType - see {@link #getByteType()}. * @param sourceType - see {@link #getSourceType()}. * @param comment - see {@link #getComment()}. */ public JException( GenericType<?> byteType, JavaClass sourceType, String comment ) { super( byteType, sourceType, comment ); } /** * {@inheritDoc} */ @Override public String toString() { return getByteTypeString(); } }
apache-2.0
VHAINNOVATIONS/Telepathology
Source/Java/CoreValueObjects/main/src/java/gov/va/med/imaging/exchange/business/ImagingServiceRequest.java
1287
/* // Per VHA Directive 2004-038, this routine should not be modified. //+---------------------------------------------------------------+ //| Property of the US Government. | //| No permission to copy or redistribute this software is given. | //| Use of unreleased versions of this software requires the user | //| to execute a written test agreement with the VistA Imaging | //| Development Office of the Department of Veterans Affairs, | //| telephone (301) 734-0100. | //| | //| The Food and Drug Administration classifies this software as | //| a medical device. As such, it may not be changed in any way. | //| Modifications to this software may result in an adulterated | //| medical device under 21CFR820, the use of which is considered | //| to be a violation of US Federal Statutes. | //+---------------------------------------------------------------+ * */ package gov.va.med.imaging.exchange.business; /** * @author Jon Louthian * * Represents an Imaging Service Request in the imaging domain model. * * */ public class ImagingServiceRequest { //INFO Do nothing. This is a dummy class. Not used. }
apache-2.0
OpenGamma/Strata
modules/market/src/main/java/com/opengamma/strata/market/param/ParameterPerturbation.java
1035
/* * Copyright (C) 2016 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.strata.market.param; /** * A function interface that allows a single parameter to be perturbed. * <p> * This interface is used by {@link ParameterizedData} to allow parameters to be * efficiently perturbed (altered). The method is invoked with the parameter index, * value and metadata, and must return the new value. */ @FunctionalInterface public interface ParameterPerturbation { /** * Applies a perturbation to a single parameter. * <p> * This method receives three arguments describing a single parameter, the index, * current value and metadata. The result is the perturbed value. * * @param index the parameter index * @param value the parameter value * @param metadata the parameter metadata * @return the perturbed value */ public abstract double perturbParameter(int index, double value, ParameterMetadata metadata); }
apache-2.0
8090boy/gomall.la
legendshop/src/java/com/legendshop/business/dao/impl/MenuManagerDaoImpl.java
3957
/*
 * LegendShop multi-user e-commerce system.
 * All rights reserved.
 */
package com.legendshop.business.dao.impl;

import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.RowMapper;

import com.legendshop.business.dao.MenuManagerDao;
import com.legendshop.model.entity.Menu;
import com.legendshop.model.entity.RoleMenu;
import com.legendshop.plugins.Plugin;
import com.legendshop.plugins.PluginStatusEnum;
import com.legendshop.util.AppUtils;
import com.legendshop.util.handler.PluginRepository;

/**
 * JDBC implementation of {@link MenuManagerDao}: loads admin menus and the
 * role-to-menu assignments from the ls_menu / ls_role_menu tables.
 */
public class MenuManagerDaoImpl implements MenuManagerDao {

    /** Spring JDBC template used for all queries; injected via setter. */
    private JdbcTemplate jdbcTemplate;

    /**
     * Loads the full menu list ordered by level then seq, excluding entries
     * provided by a plugin that is not currently enabled.
     *
     * <p>NOTE(review): if the plugin registry itself is empty, the original
    * behavior (preserved here) is to return an empty list, i.e. even core
     * menus without a providing plugin are excluded.
     *
     * @return visible menus; never {@code null}
     */
    @Override
    public List<Menu> getMenu() {
        List<Menu> result = new ArrayList<Menu>();
        List<Menu> menuList = jdbcTemplate.query("select * from ls_menu order by level,seq", new MenuRowMapper());
        // Exclude menus provided by a plugin that has been stopped.
        if (AppUtils.isNotBlank(menuList)) {
            List<Plugin> pluginList = PluginRepository.getInstance().getPlugins();
            if (AppUtils.isNotBlank(pluginList)) {
                for (Menu menu : menuList) {
                    if (AppUtils.isNotBlank(menu.getProvidedPlugin())) {
                        for (Plugin plugin : pluginList) {
                            if (menu.getProvidedPlugin().equals(plugin.getPluginConfig().getPulginId())
                                    && (plugin.getPluginConfig().getStatus().equals(PluginStatusEnum.Y))) {
                                result.add(menu);
                                // Plugin ids should be unique; stop after the first match so a
                                // duplicated registration cannot add the same menu twice.
                                break;
                            }
                        }
                    } else {
                        // Core menu entry not tied to any plugin: always visible.
                        result.add(menu);
                    }
                }
            }
        }
        return result;
    }

    /**
     * Loads every role/menu assignment together with the role name.
     *
     * @return all rows of ls_role_menu joined with ls_role
     */
    @Override
    public List<RoleMenu> getRoleMenu() {
        return jdbcTemplate.query(
            "select rm.role_id,rm.menu_id,r.name as role_name from ls_role_menu rm, ls_role r where rm.role_id = r.id",
            new RoleMenuMapper());
    }

    /** Maps one ls_menu row to a {@link Menu} entity. */
    private class MenuRowMapper implements RowMapper<Menu> {
        @Override
        public Menu mapRow(ResultSet rs, int rowNum) throws SQLException {
            Menu menu = new Menu();
            menu.setAction(rs.getString("action"));
            menu.setLabel(rs.getString("label"));
            menu.setLevel(rs.getInt("level"));
            menu.setMenuId(rs.getLong("menu_id"));
            menu.setName(rs.getString("name"));
            menu.setSeq(rs.getInt("seq"));
            menu.setParentId(rs.getLong("parent_id"));
            menu.setProvidedPlugin(rs.getString("provided_plugin"));
            menu.setTitle(rs.getString("title"));
            return menu;
        }
    }

    /** Maps one joined ls_role_menu/ls_role row to a {@link RoleMenu} entity. */
    private class RoleMenuMapper implements RowMapper<RoleMenu> {
        @Override
        public RoleMenu mapRow(ResultSet rs, int rowNum) throws SQLException {
            RoleMenu roleMenu = new RoleMenu();
            roleMenu.setMenuId(rs.getLong("menu_id"));
            roleMenu.setRoleId(rs.getString("role_id"));
            roleMenu.setRoleName(rs.getString("role_name"));
            return roleMenu;
        }
    }

    /**
     * @param jdbcTemplate the jdbcTemplate to set
     */
    public void setJdbcTemplate(JdbcTemplate jdbcTemplate) {
        this.jdbcTemplate = jdbcTemplate;
    }

    /**
     * Loads role/menu assignments for the given menu ids using a dynamically
     * sized {@code IN (?,...)} clause.
     *
     * @param menuIdList menu ids to look up; may be empty or {@code null}
     * @return matching assignments; empty list when no ids were supplied
     */
    @Override
    public List<RoleMenu> getRoleMenu(List<Long> menuIdList) {
        // Guard: an empty id list would otherwise produce "in (?)" with zero
        // bind parameters, which fails at execution time.
        if (menuIdList == null || menuIdList.isEmpty()) {
            return Collections.emptyList();
        }
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < menuIdList.size() - 1; i++) {
            sb.append("?,");
        }
        sb.append("?)");
        String sql = "select rm.role_id,rm.menu_id,r.name as role_name from ls_role_menu rm, ls_role r where rm.role_id = r.id and rm.menu_id in ("
            + sb.toString();
        return jdbcTemplate.query(sql, menuIdList.toArray(), new RoleMenuMapper());
    }

    /**
     * Loads role/menu assignments for a single menu id.
     *
     * @param menuId the menu id to look up
     * @return matching assignments
     */
    @Override
    public List<RoleMenu> getRoleMenu(Long menuId) {
        String sql = "select rm.role_id,rm.menu_id,r.name as role_name from ls_role_menu rm, ls_role r where rm.role_id = r.id and rm.menu_id = ?";
        return jdbcTemplate.query(sql, new Object[] { menuId }, new RoleMenuMapper());
    }

    /**
     * Deletes every role assignment of the given menu.
     */
    @Override
    public void deleteRoleMenu(Long menuId) {
        jdbcTemplate.update("delete from ls_role_menu where menu_id = ?", menuId);
    }
}
apache-2.0
ajoymajumdar/genie
genie-web/src/test/java/com/netflix/genie/web/configs/ServicesConfigUnitTests.java
8833
/*
 *
 *  Copyright 2016 Netflix, Inc.
 *
 *     Licensed under the Apache License, Version 2.0 (the "License");
 *     you may not use this file except in compliance with the License.
 *     You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 *     Unless required by applicable law or agreed to in writing, software
 *     distributed under the License is distributed on an "AS IS" BASIS,
 *     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *     See the License for the specific language governing permissions and
 *     limitations under the License.
 *
 */
package com.netflix.genie.web.configs;

import com.netflix.genie.common.exceptions.GenieException;
import com.netflix.genie.core.jobs.workflow.WorkflowTask;
import com.netflix.genie.core.jpa.repositories.JpaApplicationRepository;
import com.netflix.genie.core.jpa.repositories.JpaClusterRepository;
import com.netflix.genie.core.jpa.repositories.JpaCommandRepository;
import com.netflix.genie.core.jpa.repositories.JpaJobExecutionRepository;
import com.netflix.genie.core.jpa.repositories.JpaJobMetadataRepository;
import com.netflix.genie.core.jpa.repositories.JpaJobRepository;
import com.netflix.genie.core.jpa.repositories.JpaJobRequestRepository;
import com.netflix.genie.core.properties.JobsProperties;
import com.netflix.genie.core.services.ApplicationService;
import com.netflix.genie.core.services.ClusterLoadBalancer;
import com.netflix.genie.core.services.ClusterService;
import com.netflix.genie.core.services.CommandService;
import com.netflix.genie.core.services.JobKillService;
import com.netflix.genie.core.services.JobPersistenceService;
import com.netflix.genie.core.services.JobSearchService;
import com.netflix.genie.core.services.JobStateService;
import com.netflix.genie.test.categories.UnitTest;
import com.netflix.spectator.api.Registry;
import org.apache.commons.exec.Executor;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;
import org.springframework.context.ApplicationEventPublisher;
import org.springframework.context.event.ApplicationEventMulticaster;
import org.springframework.core.io.Resource;
import org.springframework.mail.javamail.JavaMailSender;

import java.util.ArrayList;
import java.util.List;
import java.util.UUID;

/**
 * Unit Tests for ServicesConfig class.
 *
 * @author amsharma
 * @since 3.0.0
 */
@Category(UnitTest.class)
public class ServicesConfigUnitTests {

    // Mocked JPA repositories shared by the individual bean-creation tests.
    private JpaApplicationRepository appRepo;
    private JpaClusterRepository clusterRepo;
    private JpaCommandRepository commandRepo;
    private JpaJobExecutionRepository jobExecutionRepo;
    private JpaJobRepository jobRepo;
    private JpaJobRequestRepository jobRequestRepo;
    private JobSearchService searchService;

    // Fresh config instance under test for every test method.
    private ServicesConfig config;

    /**
     * Creates fresh mocks and a new {@code ServicesConfig} before each test.
     */
    @Before
    public void setUp() {
        this.appRepo = Mockito.mock(JpaApplicationRepository.class);
        this.clusterRepo = Mockito.mock(JpaClusterRepository.class);
        this.commandRepo = Mockito.mock(JpaCommandRepository.class);
        this.jobRepo = Mockito.mock(JpaJobRepository.class);
        this.jobRequestRepo = Mockito.mock(JpaJobRequestRepository.class);
        this.jobExecutionRepo = Mockito.mock(JpaJobExecutionRepository.class);
        this.searchService = Mockito.mock(JobSearchService.class);
        this.config = new ServicesConfig();
    }

    /**
     * The cluster load balancer bean should be created.
     */
    @Test
    public void canGetClusterLoadBalancer() {
        Assert.assertNotNull(this.config.clusterLoadBalancer());
    }

    /**
     * The Genie file-transfer service bean should be created.
     *
     * @throws GenieException If there is any problem.
     */
    @Test
    public void canGetGenieFileTransfer() throws GenieException {
        Assert.assertNotNull(this.config.genieFileTransferService(scheme -> null));
    }

    /**
     * The default mail service bean should be created.
     */
    @Test
    public void canGetDefaultMailServiceImpl() {
        Assert.assertNotNull(this.config.getDefaultMailServiceImpl());
    }

    /**
     * The JavaMailSender-backed mail service bean should be created.
     */
    @Test
    public void canGetMailServiceImpl() {
        final JavaMailSender sender = Mockito.mock(JavaMailSender.class);
        Assert.assertNotNull(this.config.getJavaMailSenderMailService(sender, "fromAddress"));
    }

    /**
     * The application service bean should be created.
     */
    @Test
    public void canGetApplicationServiceBean() {
        Assert.assertNotNull(
            this.config.applicationService(
                this.appRepo,
                this.commandRepo
            )
        );
    }

    /**
     * The command service bean should be created.
     */
    @Test
    public void canGetCommandServiceBean() {
        Assert.assertNotNull(
            this.config.commandService(
                this.commandRepo,
                this.appRepo,
                this.clusterRepo
            )
        );
    }

    /**
     * The cluster service bean should be created.
     */
    @Test
    public void canGetClusterServiceBean() {
        Assert.assertNotNull(
            this.config.clusterService(
                this.clusterRepo,
                this.commandRepo
            )
        );
    }

    /**
     * The job search service bean should be created.
     */
    @Test
    public void canGetJobSearchServiceBean() {
        Assert.assertNotNull(
            this.config.jobSearchService(
                this.jobRepo,
                this.jobRequestRepo,
                this.jobExecutionRepo,
                Mockito.mock(JpaClusterRepository.class),
                Mockito.mock(JpaCommandRepository.class)
            )
        );
    }

    /**
     * The job persistence service bean should be created.
     */
    @Test
    public void canGetJobPersistenceServiceBean() {
        Assert.assertNotNull(
            this.config.jobPersistenceService(
                this.jobRepo,
                this.jobRequestRepo,
                Mockito.mock(JpaJobMetadataRepository.class),
                jobExecutionRepo,
                this.appRepo,
                this.clusterRepo,
                this.commandRepo
            )
        );
    }

    /**
     * The job submitter service bean should be created.
     */
    @Test
    public void canGetJobSubmitterServiceBean() {
        final JobPersistenceService persistenceService = Mockito.mock(JobPersistenceService.class);
        final ApplicationEventPublisher publisher = Mockito.mock(ApplicationEventPublisher.class);
        final ApplicationEventMulticaster multicaster = Mockito.mock(ApplicationEventMulticaster.class);
        final Resource workflowResource = Mockito.mock(Resource.class);
        final List<WorkflowTask> tasks = new ArrayList<>();
        Assert.assertNotNull(
            this.config.jobSubmitterService(
                persistenceService,
                publisher,
                multicaster,
                tasks,
                workflowResource,
                Mockito.mock(Registry.class)
            )
        );
    }

    /**
     * The job coordinator service bean should be created.
     */
    @Test
    public void canGetJobCoordinatorServiceBean() {
        Assert.assertNotNull(
            this.config.jobCoordinatorService(
                Mockito.mock(JobPersistenceService.class),
                Mockito.mock(JobKillService.class),
                Mockito.mock(JobStateService.class),
                new JobsProperties(),
                Mockito.mock(ApplicationService.class),
                Mockito.mock(ClusterService.class),
                Mockito.mock(CommandService.class),
                Mockito.mock(ClusterLoadBalancer.class),
                Mockito.mock(Registry.class),
                UUID.randomUUID().toString()
            )
        );
    }

    /**
     * The job kill service bean should be created.
     */
    @Test
    public void canGetJobKillServiceBean() {
        Assert.assertNotNull(
            this.config.jobKillService(
                "localhost",
                this.searchService,
                Mockito.mock(Executor.class),
                new JobsProperties(),
                Mockito.mock(ApplicationEventPublisher.class)
            )
        );
    }
}
apache-2.0
DoubleSmile/dubbo-learning
dubbo-config/dubbo-config-api/src/main/java/com/alibaba/dubbo/config/AbstractConfig.java
23324
/*
 * Copyright 1999-2011 Alibaba Group.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.alibaba.dubbo.config;

import java.io.Serializable;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.HashMap;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import com.alibaba.dubbo.common.Constants;
import com.alibaba.dubbo.common.URL;
import com.alibaba.dubbo.common.extension.ExtensionLoader;
import com.alibaba.dubbo.common.logger.Logger;
import com.alibaba.dubbo.common.logger.LoggerFactory;
import com.alibaba.dubbo.common.utils.CollectionUtils;
import com.alibaba.dubbo.common.utils.ConfigUtils;
import com.alibaba.dubbo.common.utils.ReflectUtils;
import com.alibaba.dubbo.common.utils.StringUtils;
import com.alibaba.dubbo.config.support.Parameter;

/**
 * Utility and shared helper methods for configuration parsing.
 * (Translated from the original Chinese: 配置解析的工具方法、公共方法)
 *
 * @author william.liangf
 * @export
 */
public abstract class AbstractConfig implements Serializable {

    private static final long serialVersionUID = 4267533505537413570L;

    protected static final Logger logger = LoggerFactory.getLogger(AbstractConfig.class);

    // Maximum accepted length for ordinary property values.
    private static final int MAX_LENGTH = 100;

    // Maximum accepted length for path-like property values.
    private static final int MAX_PATH_LENGTH = 200;

    // Allowed-character patterns used by the check* validation helpers below.
    private static final Pattern PATTERN_NAME = Pattern.compile("[\\-._0-9a-zA-Z]+");

    private static final Pattern PATTERN_MULTI_NAME = Pattern.compile("[,\\-._0-9a-zA-Z]+");

    private static final Pattern PATTERN_METHOD_NAME = Pattern.compile("[a-zA-Z][0-9a-zA-Z]*");

    private static final Pattern PATTERN_PATH = Pattern.compile("[/\\-$._0-9a-zA-Z]+");

    private static final Pattern PATTERN_NAME_HAS_SYMBOL = Pattern.compile("[:*,/\\-._0-9a-zA-Z]+");

    private static final Pattern PATTERN_KEY = Pattern.compile("[*,\\-._0-9a-zA-Z]+");

    // Bean id; also used as a prefix segment when resolving properties in appendProperties.
    protected String id;

    @Parameter(excluded = true)
    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    // Maps modern "dubbo.*" property keys to their legacy pre-2.x equivalents,
    // consulted as a last resort by appendProperties.
    private static final Map<String, String> legacyProperties = new HashMap<String, String>();

    static {
        legacyProperties.put("dubbo.protocol.name", "dubbo.service.protocol");
        legacyProperties.put("dubbo.protocol.host", "dubbo.service.server.host");
        legacyProperties.put("dubbo.protocol.port", "dubbo.service.server.port");
        legacyProperties.put("dubbo.protocol.threads", "dubbo.service.max.thread.pool.size");
        legacyProperties.put("dubbo.consumer.timeout", "dubbo.service.invoke.timeout");
        legacyProperties.put("dubbo.consumer.retries", "dubbo.service.max.retry.providers");
        legacyProperties.put("dubbo.consumer.check", "dubbo.service.allow.no.provider");
        legacyProperties.put("dubbo.service.url", "dubbo.service.address");
    }

    /**
     * Converts a legacy property value into its modern meaning: the retry
     * count is one less than the legacy "max retry providers" value, and the
     * modern "check" flag is the negation of legacy "allow no provider".
     */
    private static String convertLegacyValue(String key, String value) {
        if (value != null && value.length() > 0) {
            if ("dubbo.service.max.retry.providers".equals(key)) {
                return String.valueOf(Integer.parseInt(value) - 1);
            } else if ("dubbo.service.allow.no.provider".equals(key)) {
                return String.valueOf(! Boolean.parseBoolean(value));
            }
        }
        return value;
    }

    /**
     * Copies attribute values from an annotation instance onto this config
     * object by invoking the matching public setter for every annotation
     * attribute whose value differs from its declared default.
     */
    protected void appendAnnotation(Class<?> annotationClass, Object annotation) {
        Method[] methods = annotationClass.getMethods(); // all attribute accessors of the annotation type
        for (Method method : methods) {
            if (method.getDeclaringClass() != Object.class
                    && method.getReturnType() != void.class // has a return value
                    && method.getParameterTypes().length == 0 // takes no parameters (the original comment claimed the opposite)
                    && Modifier.isPublic(method.getModifiers()) // public
                    && ! Modifier.isStatic(method.getModifiers())) { // non-static
                try {
                    String property = method.getName();
                    // Both interfaceClass and interfaceName map onto the "interface" property.
                    if ("interfaceClass".equals(property) || "interfaceName".equals(property)) {
                        property = "interface";
                    }
                    String setter = "set" + property.substring(0, 1).toUpperCase() + property.substring(1);
                    Object value = method.invoke(annotation, new Object[0]);
                    if (value != null && ! value.equals(method.getDefaultValue())) {
                        Class<?> parameterType = ReflectUtils.getBoxedClass(method.getReturnType()); // box primitives
                        if ("filter".equals(property) || "listener".equals(property)) {
                            // filter/listener setters take a comma-joined String rather than String[].
                            parameterType = String.class;
                            value = StringUtils.join((String[]) value, ",");
                        } else if ("parameters".equals(property)) {
                            // parameters setter takes a Map built from the key/value String[] pairs.
                            parameterType = Map.class;
                            value = CollectionUtils.toStringMap((String[]) value);
                        }
                        try {
                            Method setterMethod = getClass().getMethod(setter, new Class<?>[] { parameterType });
                            setterMethod.invoke(this, new Object[] { value });
                        } catch (NoSuchMethodException e) {
                            // ignore: the config type simply has no matching setter
                        }
                    }
                } catch (Throwable e) {
                    logger.error(e.getMessage(), e);
                }
            }
        }
    }

    /**
     * Populates the given config's primitive-typed properties via its public
     * setters, resolving each value in priority order:
     * <ol>
     *   <li>system property "dubbo.&lt;tag&gt;.&lt;id&gt;.&lt;prop&gt;"</li>
     *   <li>system property "dubbo.&lt;tag&gt;.&lt;prop&gt;"</li>
     *   <li>if the getter currently returns null: ConfigUtils properties
     *       (id-qualified, then plain), then the mapped legacy key</li>
     * </ol>
     */
    protected static void appendProperties(AbstractConfig config) {
        if (config == null) {
            return;
        }
        String prefix = "dubbo." + getTagName(config.getClass()) + ".";
        Method[] methods = config.getClass().getMethods();
        for (Method method : methods) {
            try {
                String name = method.getName();
                if (name.length() > 3 && name.startsWith("set") && Modifier.isPublic(method.getModifiers()) // public setter
                        && method.getParameterTypes().length == 1 && isPrimitive(method.getParameterTypes()[0])) { // single primitive-ish parameter
                    // Convert the property name: setName -> name, setNameSpace -> name-space.
                    String property = StringUtils.camelToSplitName(name.substring(3, 4).toLowerCase() + name.substring(4), "-");

                    String value = null;
                    if (config.getId() != null && config.getId().length() > 0) {
                        // Highest priority: id-qualified system property.
                        String pn = prefix + config.getId() + "." + property;
                        value = System.getProperty(pn);
                        if(! StringUtils.isBlank(value)) {
                            logger.info("Use System Property " + pn + " to config dubbo");
                        }
                    }
                    if (value == null || value.length() == 0) {
                        // Next: plain system property.
                        String pn = prefix + property;
                        value = System.getProperty(pn);
                        if(! StringUtils.isBlank(value)) {
                            logger.info("Use System Property " + pn + " to config dubbo");
                        }
                    }
                    if (value == null || value.length() == 0) {
                        Method getter;
                        try { // try the "get" accessor first
                            getter = config.getClass().getMethod("get" + name.substring(3), new Class<?>[0]);
                        } catch (NoSuchMethodException e) {
                            try { // fall back to the "is" accessor
                                getter = config.getClass().getMethod("is" + name.substring(3), new Class<?>[0]);
                            } catch (NoSuchMethodException e2) {
                                getter = null;
                            }
                        }
                        if (getter != null) {
                            // Only fill from properties files when nothing was set programmatically.
                            if (getter.invoke(config, new Object[0]) == null) {
                                if (config.getId() != null && config.getId().length() > 0) {
                                    value = ConfigUtils.getProperty(prefix + config.getId() + "." + property);
                                }
                                if (value == null || value.length() == 0) {
                                    value = ConfigUtils.getProperty(prefix + property);
                                }
                                if (value == null || value.length() == 0) {
                                    // Last resort: the legacy pre-2.x property key, converted.
                                    String legacyKey = legacyProperties.get(prefix + property);
                                    if (legacyKey != null && legacyKey.length() > 0) {
                                        value = convertLegacyValue(legacyKey, ConfigUtils.getProperty(legacyKey));
                                    }
                                }
                            }
                        }
                    }
                    if (value != null && value.length() > 0) {
                        method.invoke(config, new Object[] {convertPrimitive(method.getParameterTypes()[0], value)});
                    }
                }
            } catch (Exception e) {
                logger.error(e.getMessage(), e);
            }
        }
    }

    // Derives the tag name from the class name: ModuleConfig -> module, ModuleBean -> module.
    private static String getTagName(Class<?> cls) {
        String tag = cls.getSimpleName();
        for (String suffix : SUFFIXS) {
            if (tag.endsWith(suffix)) {
                tag = tag.substring(0, tag.length() - suffix.length());
                break;
            }
        }
        tag = tag.toLowerCase();
        return tag;
    }

    /** Appends URL parameters from the config's getters with no key prefix. */
    protected static void appendParameters(Map<String, String> parameters, Object config) {
        appendParameters(parameters, config, null);
    }

    /**
     * Appends URL parameters read from the config object's public getters.
     * Keys come from the @Parameter annotation (or the de-camel-cased getter
     * name); values may be URL-escaped or appended to defaults as directed by
     * the annotation. A getParameters() Map is merged in with '-' keys
     * normalized to '.'. Throws IllegalStateException when a required
     * parameter resolves to null/empty.
     */
    @SuppressWarnings("unchecked")
    protected static void appendParameters(Map<String, String> parameters, Object config, String prefix) {
        if (config == null) {
            return;
        }
        Method[] methods = config.getClass().getMethods();
        for (Method method : methods) {
            try {
                String name = method.getName();
                if ((name.startsWith("get") || name.startsWith("is"))
                        && ! "getClass".equals(name)
                        && Modifier.isPublic(method.getModifiers())
                        && method.getParameterTypes().length == 0
                        && isPrimitive(method.getReturnType())) {
                    Parameter parameter = method.getAnnotation(Parameter.class);
                    if (method.getReturnType() == Object.class || parameter != null && parameter.excluded()) {
                        continue;
                    }
                    int i = name.startsWith("get") ? 3 : 2;
                    // e.g. method name getAddressName -> address.name
                    String prop = StringUtils.camelToSplitName(name.substring(i, i + 1).toLowerCase() + name.substring(i + 1), ".");
                    String key;
                    if (parameter != null && parameter.key() != null && parameter.key().length() > 0) {
                        key = parameter.key();
                    } else {
                        key = prop;
                    }
                    Object value = method.invoke(config, new Object[0]);
                    String str = String.valueOf(value).trim();
                    if (value != null && str.length() > 0) {
                        if (parameter != null && parameter.escaped()) {
                            str = URL.encode(str);
                        }
                        if (parameter != null && parameter.append()) {
                            // Prepend any default.* value and any previously set value.
                            String pre = (String)parameters.get(Constants.DEFAULT_KEY + "." + key);
                            if (pre != null && pre.length() > 0) {
                                str = pre + "," + str;
                            }
                            pre = (String)parameters.get(key);
                            if (pre != null && pre.length() > 0) {
                                str = pre + "," + str;
                            }
                        }
                        if (prefix != null && prefix.length() > 0) {
                            key = prefix + "." + key;
                        }
                        parameters.put(key, str);
                    } else if (parameter != null && parameter.required()) {
                        throw new IllegalStateException(config.getClass().getSimpleName() + "." + key + " == null");
                    }
                } else if ("getParameters".equals(name)
                        && Modifier.isPublic(method.getModifiers())
                        && method.getParameterTypes().length == 0
                        && method.getReturnType() == Map.class) {
                    Map<String, String> map = (Map<String, String>) method.invoke(config, new Object[0]);
                    if (map != null && map.size() > 0) {
                        String pre = (prefix != null && prefix.length() > 0 ? prefix + "." : "");
                        for (Map.Entry<String, String> entry : map.entrySet()) {
                            parameters.put(pre + entry.getKey().replace('-', '.'), entry.getValue());
                        }
                    }
                }
            } catch (Exception e) {
                throw new IllegalStateException(e.getMessage(), e);
            }
        }
    }

    /** Appends attribute values from the config's getters with no key prefix. */
    protected static void appendAttributes(Map<Object, Object> parameters, Object config) {
        appendAttributes(parameters, config, null);
    }

    /**
     * Appends raw (non-stringified) attribute values for every getter whose
     * @Parameter annotation has attribute() == true. Keys follow the same
     * annotation-key-or-getter-name rule as appendParameters.
     */
    protected static void appendAttributes(Map<Object, Object> parameters, Object config, String prefix) {
        if (config == null) {
            return;
        }
        Method[] methods = config.getClass().getMethods();
        for (Method method : methods) {
            try {
                String name = method.getName();
                if ((name.startsWith("get") || name.startsWith("is"))
                        && ! "getClass".equals(name)
                        && Modifier.isPublic(method.getModifiers())
                        && method.getParameterTypes().length == 0
                        && isPrimitive(method.getReturnType())) {
                    Parameter parameter = method.getAnnotation(Parameter.class);
                    // Only getters explicitly flagged as attributes are collected.
                    if (parameter == null || !parameter.attribute())
                        continue;
                    String key;
                    if (parameter != null && parameter.key() != null && parameter.key().length() > 0) {
                        key = parameter.key();
                    } else {
                        int i = name.startsWith("get") ? 3 : 2;
                        key = name.substring(i, i + 1).toLowerCase() + name.substring(i + 1);
                    }
                    Object value = method.invoke(config, new Object[0]);
                    if (value != null) {
                        if (prefix != null && prefix.length() > 0) {
                            key = prefix + "." + key;
                        }
                        parameters.put(key, value);
                    }
                }
            } catch (Exception e) {
                throw new IllegalStateException(e.getMessage(), e);
            }
        }
    }

    // "Primitive" here means: true primitives plus their wrappers, String, and Object.
    private static boolean isPrimitive(Class<?> type) {
        return type.isPrimitive()
                || type == String.class
                || type == Character.class
                || type == Boolean.class
                || type == Byte.class
                || type == Short.class
                || type == Integer.class
                || type == Long.class
                || type == Float.class
                || type == Double.class
                || type == Object.class;
    }

    // Parses a String into the requested primitive/wrapper type; returns the
    // String unchanged for any other type.
    private static Object convertPrimitive(Class<?> type, String value) {
        if (type == char.class || type == Character.class) {
            return value.length() > 0 ? value.charAt(0) : '\0';
        } else if (type == boolean.class || type == Boolean.class) {
            return Boolean.valueOf(value);
        } else if (type == byte.class || type == Byte.class) {
            return Byte.valueOf(value);
        } else if (type == short.class || type == Short.class) {
            return Short.valueOf(value);
        } else if (type == int.class || type == Integer.class) {
            return Integer.valueOf(value);
        } else if (type == long.class || type == Long.class) {
            return Long.valueOf(value);
        } else if (type == float.class || type == Float.class) {
            return Float.valueOf(value);
        } else if (type == double.class || type == Double.class) {
            return Double.valueOf(value);
        }
        return value;
    }

    /** Validates the name and verifies the named extension exists for the given SPI type. */
    protected static void checkExtension(Class<?> type, String property, String value) {
        checkName(property, value);
        if (value != null && value.length() > 0
                && ! ExtensionLoader.getExtensionLoader(type).hasExtension(value)) {
            throw new IllegalStateException("No such extension " + value + " for " + property + "/" + type.getName());
        }
    }

    /**
     * Validates a comma-separated extension list; "-"-prefixed entries are
     * exclusions and the "default" placeholder is skipped.
     */
    protected static void checkMultiExtension(Class<?> type, String property, String value) {
        checkMultiName(property, value);
        if (value != null && value.length() > 0) {
            String[] values = value.split("\\s*[,]+\\s*");
            for (String v : values) {
                if (v.startsWith(Constants.REMOVE_VALUE_PREFIX)) {
                    v = v.substring(1);
                }
                if (Constants.DEFAULT_KEY.equals(v)) {
                    continue;
                }
                if (! ExtensionLoader.getExtensionLoader(type).hasExtension(v)) {
                    throw new IllegalStateException("No such extension " + v + " for " + property + "/" + type.getName());
                }
            }
        }
    }

    protected static void checkLength(String property, String value) {
        checkProperty(property, value, MAX_LENGTH, null);
    }

    protected static void checkPathLength(String property, String value) {
        checkProperty(property, value, MAX_PATH_LENGTH, null);
    }

    protected static void checkName(String property, String value) {
        checkProperty(property, value, MAX_LENGTH, PATTERN_NAME);
    }

    protected static void checkNameHasSymbol(String property, String value) {
        checkProperty(property, value, MAX_LENGTH, PATTERN_NAME_HAS_SYMBOL);
    }

    protected static void checkKey(String property, String value) {
        checkProperty(property, value, MAX_LENGTH, PATTERN_KEY);
    }

    protected static void checkMultiName(String property, String value) {
        checkProperty(property, value, MAX_LENGTH, PATTERN_MULTI_NAME);
    }

    protected static void checkPathName(String property, String value) {
        checkProperty(property, value, MAX_PATH_LENGTH, PATTERN_PATH);
    }

    protected static void checkMethodName(String property, String value) {
        checkProperty(property, value, MAX_LENGTH, PATTERN_METHOD_NAME);
    }

    protected static void checkParameterName(Map<String, String> parameters) {
        if (parameters == null || parameters.size() == 0) {
            return;
        }
        for (Map.Entry<String, String> entry : parameters.entrySet()) {
            // change by tony.chenl: parameter value may contain a colon, e.g. a napoli address
            checkNameHasSymbol(entry.getKey(), entry.getValue());
        }
    }

    /**
     * Core validator: enforces a maximum length and, when a pattern is given,
     * an allowed-character set. Empty values always pass.
     */
    protected static void checkProperty(String property, String value, int maxlength, Pattern pattern) {
        if (value == null || value.length() == 0) {
            return;
        }
        if(value.length() > maxlength){
            throw new IllegalStateException("Invalid " + property + "=\"" + value + "\" is longer than " + maxlength);
        }
        if (pattern != null) {
            Matcher matcher = pattern.matcher(value);
            if(! matcher.matches()) {
                throw new IllegalStateException("Invalid " + property + "=\"" + value + "\" contain illegal charactor, only digit, letter, '-', '_' and '.' is legal.");
            }
        }
    }

    // Registers a JVM shutdown hook that tears down all protocols on exit.
    static {
        Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
            public void run() {
                if (logger.isInfoEnabled()) {
                    logger.info("Run shutdown hook now.");
                }
                ProtocolConfig.destroyAll();
            }
        }, "DubboShutdownHook"));
    }

    // Class-name suffixes stripped by getTagName.
    private static final String[] SUFFIXS = new String[] {"Config", "Bean"};

    /**
     * Renders the config as a dubbo XML-style tag, e.g.
     * {@code <dubbo:service interface="..." />}, from its primitive getters.
     */
    @Override
    public String toString() {
        try {
            StringBuilder buf = new StringBuilder();
            buf.append("<dubbo:");
            buf.append(getTagName(getClass()));
            Method[] methods = getClass().getMethods();
            for (Method method : methods) {
                try {
                    String name = method.getName();
                    if ((name.startsWith("get") || name.startsWith("is"))
                            && ! "getClass".equals(name) && ! "get".equals(name) && ! "is".equals(name)
                            && Modifier.isPublic(method.getModifiers())
                            && method.getParameterTypes().length == 0
                            && isPrimitive(method.getReturnType())) {
                        int i = name.startsWith("get") ? 3 : 2;
                        String key = name.substring(i, i + 1).toLowerCase() + name.substring(i + 1);
                        Object value = method.invoke(this, new Object[0]);
                        if (value != null) {
                            buf.append(" ");
                            buf.append(key);
                            buf.append("=\"");
                            buf.append(value);
                            buf.append("\"");
                        }
                    }
                } catch (Exception e) {
                    logger.warn(e.getMessage(), e);
                }
            }
            buf.append(" />");
            return buf.toString();
        } catch (Throwable t) { // defensive fault tolerance: toString must never throw
            logger.warn(t.getMessage(), t);
            return super.toString();
        }
    }

}
apache-2.0
ruspl-afed/dbeaver
plugins/org.jkiss.dbeaver.ext.generic/src/org/jkiss/dbeaver/ext/generic/model/GenericPrimaryKey.java
3281
/*
 * DBeaver - Universal Database Manager
 * Copyright (C) 2010-2017 Serge Rider (serge@jkiss.org)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jkiss.dbeaver.ext.generic.model;

import org.jkiss.code.Nullable;
import org.jkiss.dbeaver.model.runtime.DBRProgressMonitor;
import org.jkiss.dbeaver.model.struct.DBSEntityConstraintType;
import org.jkiss.utils.CommonUtils;

import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

/**
 * Generic primary-key (or unique-key) constraint holding an ordered list of
 * constraint columns.
 */
public class GenericPrimaryKey extends GenericTableConstraint {

    // Constraint columns in ordinal order; null until columns are attached.
    private List<GenericTableConstraintColumn> columns;

    public GenericPrimaryKey(GenericTable table, String name, @Nullable String remarks, DBSEntityConstraintType constraintType, boolean persisted) {
        super(table, name, remarks, constraintType, persisted);
    }

    /**
     * Copy constructor: deep-copies the column list so the new constraint owns
     * its own {@link GenericTableConstraintColumn} instances.
     *
     * @param constraint the constraint to copy
     */
    GenericPrimaryKey(GenericPrimaryKey constraint) {
        super(constraint.getTable(), constraint.getName(), constraint.getDescription(), constraint.getConstraintType(), constraint.isPersisted());
        if (constraint.columns != null) {
            this.columns = new ArrayList<>(constraint.columns.size());
            for (GenericTableConstraintColumn sourceColumn : constraint.columns) {
                this.columns.add(new GenericTableConstraintColumn(this, sourceColumn));
            }
        }
    }

    /**
     * @return the constraint columns, or {@code null} if none were attached yet
     */
    @Override
    public List<GenericTableConstraintColumn> getAttributeReferences(DBRProgressMonitor monitor) {
        return columns;
    }

    /** Appends a single column, lazily creating the backing list. */
    public void addColumn(GenericTableConstraintColumn column) {
        if (columns == null) {
            columns = new ArrayList<>();
        }
        this.columns.add(column);
    }

    /**
     * Replaces the column list and, when it has more than one entry, sorts it
     * by ordinal position.
     */
    void setColumns(List<GenericTableConstraintColumn> columns) {
        this.columns = columns;
        if (!CommonUtils.isEmpty(this.columns) && this.columns.size() > 1) {
            Collections.sort(columns, new Comparator<GenericTableConstraintColumn>() {
                @Override
                public int compare(GenericTableConstraintColumn o1, GenericTableConstraintColumn o2) {
                    // Integer.compare avoids the overflow pitfall of comparing by subtraction.
                    return Integer.compare(o1.getOrdinalPosition(), o2.getOrdinalPosition());
                }
            });
        }
    }

    /**
     * @return true when the given column object (identity comparison, not
     *         equals) is referenced by this constraint
     */
    public boolean hasColumn(GenericTableColumn column) {
        if (this.columns != null) {
            for (GenericTableConstraintColumn constColumn : columns) {
                if (constColumn.getAttribute() == column) {
                    return true;
                }
            }
        }
        return false;
    }
}
apache-2.0
googleads/google-ads-java
google-ads-stubs-v8/src/main/java/com/google/ads/googleads/v8/services/GetCustomInterestRequestOrBuilder.java
1037
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v8/services/custom_interest_service.proto package com.google.ads.googleads.v8.services; public interface GetCustomInterestRequestOrBuilder extends // @@protoc_insertion_point(interface_extends:google.ads.googleads.v8.services.GetCustomInterestRequest) com.google.protobuf.MessageOrBuilder { /** * <pre> * Required. The resource name of the custom interest to fetch. * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code> * @return The resourceName. */ java.lang.String getResourceName(); /** * <pre> * Required. The resource name of the custom interest to fetch. * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code> * @return The bytes for resourceName. */ com.google.protobuf.ByteString getResourceNameBytes(); }
apache-2.0
chengmaoning/jroad
src/test/java/com/chengmaoning/jroad/aop/AopTest.java
557
/** * */ package com.chengmaoning.jroad.aop; import org.junit.Test; import org.springframework.context.ApplicationContext; import org.springframework.context.support.ClassPathXmlApplicationContext; /** * AopTest.java * @author chengmaoning * * 2018年7月5日下午7:17:19 */ public class AopTest { @Test public void test() { ApplicationContext context = new ClassPathXmlApplicationContext("application-context.xml"); TargetClass targetClass = (TargetClass) context.getBean("targetClass"); targetClass.addUser("cheng", "maoning"); } }
apache-2.0
devsoulwolf/Android-ParallaxRefresh
library/src/main/java/net/soulwolf/widget/parallaxrefresh/widget/ParallaxGridView.java
3457
/** * <pre> * Copyright 2015 Soulwolf Ching * Copyright 2015 The Android Open Source Project for Android-ParallaxRefresh * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * </pre> */ package net.soulwolf.widget.parallaxrefresh.widget; import android.content.Context; import android.support.annotation.NonNull; import android.util.AttributeSet; import android.view.View; import android.widget.AbsListView; import net.soulwolf.widget.parallaxrefresh.ParallaxScrollObserver; import net.soulwolf.widget.parallaxrefresh.event.ParallaxScrollCallback; import net.soulwolf.widget.parallaxrefresh.view.GridViewCompat; /** * author: Soulwolf Created on 2015/9/5 19:34. 
* email : Ching.Soulwolf@gmail.com */ public class ParallaxGridView extends GridViewCompat implements ParallaxScrollObserver, AbsListView.OnScrollListener { private ParallaxScrollCallback mParallaxScrollCallback; private OnScrollListener mDelegateOnScrollListener; private int mScrollY; public ParallaxGridView(Context context) { super(context); initialize(); } public ParallaxGridView(Context context, AttributeSet attrs) { super(context, attrs); initialize(); } public ParallaxGridView(Context context, AttributeSet attrs, int defStyleAttr) { super(context, attrs, defStyleAttr); initialize(); } private void initialize() { super.setOnScrollListener(this); } @Override public void setOnScrollListener(OnScrollListener l) { this.mDelegateOnScrollListener = l; } @Override public void setScrollCallback(@NonNull ParallaxScrollCallback callback) { this.mParallaxScrollCallback = callback; } @Override public void setPlaceholder(@NonNull View view) { removeHeaderView(view); addHeaderView(view); } @Override public void onScrollStateChanged(AbsListView view, int scrollState) { if(mDelegateOnScrollListener != null){ mDelegateOnScrollListener.onScrollStateChanged(view,scrollState); } } @Override public void onScroll(AbsListView view, int firstVisibleItem, int visibleItemCount, int totalItemCount) { int realScrollY = getRealScrollY(); if(mParallaxScrollCallback != null && realScrollY != mScrollY){ mParallaxScrollCallback.onParallaxScrollChanged(0,realScrollY,false); mScrollY = realScrollY; } if(mDelegateOnScrollListener != null){ mDelegateOnScrollListener.onScroll(view, firstVisibleItem, visibleItemCount, totalItemCount); } } private int getRealScrollY() { View c = getChildCount() > 0 ? getChildAt(0) : null; if (c == null) { return 0; } int firstVisiblePosition = getFirstVisiblePosition(); int top = c.getTop(); return -top + firstVisiblePosition * c.getHeight() ; } }
apache-2.0
chtyim/cdap
cdap-app-fabric/src/main/java/co/cask/cdap/internal/app/runtime/batch/MapReduceTaskContextProvider.java
9263
/*
 * Copyright © 2014-2016 Cask Data, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package co.cask.cdap.internal.app.runtime.batch;

import co.cask.cdap.api.mapreduce.MapReduceSpecification;
import co.cask.cdap.api.metrics.MetricsCollectionService;
import co.cask.cdap.app.metrics.MapReduceMetrics;
import co.cask.cdap.app.program.Program;
import co.cask.cdap.app.program.Programs;
import co.cask.cdap.common.twill.LocalLocationFactory;
import co.cask.cdap.data2.dataset2.DatasetFramework;
import co.cask.cdap.internal.app.runtime.workflow.WorkflowMapReduceProgram;
import co.cask.tephra.TransactionSystemClient;
import com.google.common.base.Preconditions;
import com.google.common.base.Throwables;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.util.concurrent.AbstractIdleService;
import com.google.inject.Injector;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.MRConfig;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.twill.discovery.DiscoveryServiceClient;
import org.apache.twill.filesystem.Location;
import org.apache.twill.filesystem.LocationFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.io.IOException;
import java.util.Objects;
import java.util.concurrent.atomic.AtomicReference;

/**
 * Provides access to MapReduceTaskContext for mapreduce job tasks.
 *
 * <p>Contexts are created lazily and cached per task attempt (see
 * {@link ContextCacheKey}); all cached contexts are closed when this service
 * shuts down.
 */
public class MapReduceTaskContextProvider extends AbstractIdleService {

  private static final Logger LOG = LoggerFactory.getLogger(MapReduceTaskContextProvider.class);

  /** Guice injector used to resolve runtime services for each task context. */
  private final Injector injector;

  // Maintain a cache of taskId to MapReduceTaskContext
  // Each task should have its own instance of MapReduceTaskContext so that a different dataset instance will
  // be created for each task, which is needed in local mode since the job runs with multiple threads
  private final LoadingCache<ContextCacheKey, BasicMapReduceTaskContext> taskContexts;

  /**
   * Helper method to tell if the MR is running in local mode or not. This method doesn't really belong to this
   * class, but currently there is no better place for it.
   *
   * @param hConf the Hadoop job configuration to inspect
   * @return true if the configured MR framework is the local one
   */
  static boolean isLocal(Configuration hConf) {
    // Hadoop defaults to local when the framework name is not set at all.
    String mrFramework = hConf.get(MRConfig.FRAMEWORK_NAME, MRConfig.LOCAL_FRAMEWORK_NAME);
    return MRConfig.LOCAL_FRAMEWORK_NAME.equals(mrFramework);
  }

  /**
   * Creates an instance with the given {@link Injector} that will be used for getting service instances.
   */
  protected MapReduceTaskContextProvider(Injector injector) {
    this.injector = injector;
    this.taskContexts = CacheBuilder.newBuilder().build(createCacheLoader(injector));
  }

  protected Injector getInjector() {
    return injector;
  }

  @Override
  protected void startUp() throws Exception {
    // no-op: contexts are created lazily on first access
  }

  @Override
  protected void shutDown() throws Exception {
    // Close all the contexts to release resources
    for (BasicMapReduceTaskContext context : taskContexts.asMap().values()) {
      try {
        context.close();
      } catch (Exception e) {
        // Keep closing the remaining contexts even if one fails.
        LOG.warn("Exception when closing context {}", context, e);
      }
    }
  }

  /**
   * Returns the {@link BasicMapReduceTaskContext} for the given task.
   * The context is created on first access and cached for subsequent calls
   * with the same task attempt.
   */
  public final <K, V> BasicMapReduceTaskContext<K, V> get(TaskAttemptContext taskAttemptContext) {
    ContextCacheKey key = new ContextCacheKey(taskAttemptContext);
    @SuppressWarnings("unchecked")
    BasicMapReduceTaskContext<K, V> context = (BasicMapReduceTaskContext<K, V>) taskContexts.getUnchecked(key);
    return context;
  }

  /**
   * Creates a {@link Program} instance based on the information from the {@link MapReduceContextConfig}, using
   * the given program ClassLoader.
   */
  private Program createProgram(MapReduceContextConfig contextConfig, ClassLoader programClassLoader) {
    Location programLocation;
    LocationFactory locationFactory = new LocalLocationFactory();
    if (isLocal(contextConfig.getHConf())) {
      // Just create a local location factory. It's for temp usage only as the program location is always absolute.
      programLocation = locationFactory.create(contextConfig.getProgramJarURI());
    } else {
      // In distributed mode, the program jar is localized to the container
      programLocation = locationFactory.create(new File(contextConfig.getProgramJarName()).getAbsoluteFile().toURI());
    }
    try {
      Program program = Programs.create(programLocation, programClassLoader);
      String mapReduceName = contextConfig.getProgramNameInWorkflow();
      // See if it was launched from Workflow; if it was, change the Program.
      if (mapReduceName != null) {
        MapReduceSpecification mapReduceSpec =
          program.getApplicationSpecification().getMapReduce().get(mapReduceName);
        Preconditions.checkArgument(mapReduceSpec != null, "Cannot find MapReduceSpecification for %s in %s.",
                                    mapReduceName, program.getId());
        program = new WorkflowMapReduceProgram(program, mapReduceSpec);
      }
      return program;
    } catch (IOException e) {
      throw Throwables.propagate(e);
    }
  }

  /**
   * Creates a {@link CacheLoader} for the task context cache.
   * Services that are identical for all tasks (discovery, dataset framework, the
   * {@link Program}) are resolved once and shared across loads.
   */
  private CacheLoader<ContextCacheKey, BasicMapReduceTaskContext> createCacheLoader(final Injector injector) {
    final DiscoveryServiceClient discoveryServiceClient = injector.getInstance(DiscoveryServiceClient.class);
    final DatasetFramework datasetFramework = injector.getInstance(DatasetFramework.class);
    // Multiple instances of BasicMapReduceTaskContext can share the same program.
    final AtomicReference<Program> programRef = new AtomicReference<>();
    return new CacheLoader<ContextCacheKey, BasicMapReduceTaskContext>() {
      @Override
      public BasicMapReduceTaskContext load(ContextCacheKey key) throws Exception {
        MapReduceContextConfig contextConfig = new MapReduceContextConfig(key.getConfiguration());
        MapReduceClassLoader classLoader = MapReduceClassLoader.getFromConfiguration(key.getConfiguration());
        Program program = programRef.get();
        if (program == null) {
          // Creation of program is relatively cheap, so just create and do compare and set.
          // A concurrent loader may win the race; either way programRef ends up non-null.
          programRef.compareAndSet(null, createProgram(contextConfig, classLoader));
          program = programRef.get();
        }
        MapReduceSpecification spec = program.getApplicationSpecification().getMapReduce().get(program.getName());
        MapReduceMetrics.TaskType taskType = null;
        if (MapReduceMetrics.TaskType.hasType(key.getTaskAttemptID().getTaskType())) {
          taskType = MapReduceMetrics.TaskType.from(key.getTaskAttemptID().getTaskType());
        }
        // if this is not for a mapper or a reducer, we don't need the metrics collection service
        MetricsCollectionService metricsCollectionService =
          (taskType == null) ? null : injector.getInstance(MetricsCollectionService.class);
        TransactionSystemClient txClient = injector.getInstance(TransactionSystemClient.class);
        return new BasicMapReduceTaskContext(
          program, taskType, contextConfig.getRunId(), key.getTaskAttemptID().getTaskID().toString(),
          contextConfig.getArguments(), spec, contextConfig.getLogicalStartTime(),
          contextConfig.getWorkflowToken(), discoveryServiceClient, metricsCollectionService, txClient,
          contextConfig.getTx(), datasetFramework, classLoader.getPluginInstantiator(),
          contextConfig.getLocalizedResources()
        );
      }
    };
  }

  /**
   * Private class to represent the caching key for the {@link BasicMapReduceTaskContext} instances.
   * Equality is based on the task attempt ID only; the configuration is carried
   * along so the loader can read it, but it does not take part in comparison.
   */
  private static final class ContextCacheKey {

    private final TaskAttemptID taskAttemptID;
    private final Configuration configuration;

    private ContextCacheKey(TaskAttemptContext context) {
      this.taskAttemptID = context.getTaskAttemptID();
      this.configuration = context.getConfiguration();
    }

    public TaskAttemptID getTaskAttemptID() {
      return taskAttemptID;
    }

    public Configuration getConfiguration() {
      return configuration;
    }

    @Override
    public boolean equals(Object o) {
      if (this == o) {
        return true;
      }
      if (o == null || getClass() != o.getClass()) {
        return false;
      }
      // Only compares with the task ID
      ContextCacheKey that = (ContextCacheKey) o;
      return Objects.equals(taskAttemptID, that.taskAttemptID);
    }

    @Override
    public int hashCode() {
      // Must match equals(): hash on the task attempt ID only.
      return Objects.hash(taskAttemptID);
    }
  }
}
apache-2.0
nezihyigitbasi/presto
presto-main/src/test/java/com/facebook/presto/server/TestHttpRequestSessionContext.java
5578
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.server; import com.facebook.presto.spi.security.Identity; import com.facebook.presto.spi.security.SelectedRole; import com.google.common.collect.ImmutableListMultimap; import com.google.common.collect.ImmutableMap; import org.testng.annotations.Test; import javax.servlet.http.HttpServletRequest; import javax.ws.rs.WebApplicationException; import java.util.Optional; import static com.facebook.presto.SystemSessionProperties.HASH_PARTITION_COUNT; import static com.facebook.presto.SystemSessionProperties.JOIN_DISTRIBUTION_TYPE; import static com.facebook.presto.SystemSessionProperties.QUERY_MAX_MEMORY; import static com.facebook.presto.client.PrestoHeaders.PRESTO_CATALOG; import static com.facebook.presto.client.PrestoHeaders.PRESTO_CLIENT_INFO; import static com.facebook.presto.client.PrestoHeaders.PRESTO_EXTRA_CREDENTIAL; import static com.facebook.presto.client.PrestoHeaders.PRESTO_LANGUAGE; import static com.facebook.presto.client.PrestoHeaders.PRESTO_PREPARED_STATEMENT; import static com.facebook.presto.client.PrestoHeaders.PRESTO_ROLE; import static com.facebook.presto.client.PrestoHeaders.PRESTO_SCHEMA; import static com.facebook.presto.client.PrestoHeaders.PRESTO_SESSION; import static com.facebook.presto.client.PrestoHeaders.PRESTO_SOURCE; import static com.facebook.presto.client.PrestoHeaders.PRESTO_TIME_ZONE; import static com.facebook.presto.client.PrestoHeaders.PRESTO_USER; import static 
org.testng.Assert.assertEquals; public class TestHttpRequestSessionContext { @Test public void testSessionContext() { HttpServletRequest request = new MockHttpServletRequest( ImmutableListMultimap.<String, String>builder() .put(PRESTO_USER, "testUser") .put(PRESTO_SOURCE, "testSource") .put(PRESTO_CATALOG, "testCatalog") .put(PRESTO_SCHEMA, "testSchema") .put(PRESTO_LANGUAGE, "zh-TW") .put(PRESTO_TIME_ZONE, "Asia/Taipei") .put(PRESTO_CLIENT_INFO, "client-info") .put(PRESTO_SESSION, QUERY_MAX_MEMORY + "=1GB") .put(PRESTO_SESSION, JOIN_DISTRIBUTION_TYPE + "=partitioned," + HASH_PARTITION_COUNT + " = 43") .put(PRESTO_PREPARED_STATEMENT, "query1=select * from foo,query2=select * from bar") .put(PRESTO_ROLE, "foo_connector=ALL") .put(PRESTO_ROLE, "bar_connector=NONE") .put(PRESTO_ROLE, "foobar_connector=ROLE{role}") .put(PRESTO_EXTRA_CREDENTIAL, "test.token.foo=bar") .put(PRESTO_EXTRA_CREDENTIAL, "test.token.abc=xyz") .build(), "testRemote"); HttpRequestSessionContext context = new HttpRequestSessionContext(request); assertEquals(context.getSource(), "testSource"); assertEquals(context.getCatalog(), "testCatalog"); assertEquals(context.getSchema(), "testSchema"); assertEquals(context.getIdentity(), new Identity("testUser", Optional.empty())); assertEquals(context.getClientInfo(), "client-info"); assertEquals(context.getLanguage(), "zh-TW"); assertEquals(context.getTimeZoneId(), "Asia/Taipei"); assertEquals(context.getSystemProperties(), ImmutableMap.of(QUERY_MAX_MEMORY, "1GB", JOIN_DISTRIBUTION_TYPE, "partitioned", HASH_PARTITION_COUNT, "43")); assertEquals(context.getPreparedStatements(), ImmutableMap.of("query1", "select * from foo", "query2", "select * from bar")); assertEquals(context.getIdentity().getRoles(), ImmutableMap.of( "foo_connector", new SelectedRole(SelectedRole.Type.ALL, Optional.empty()), "bar_connector", new SelectedRole(SelectedRole.Type.NONE, Optional.empty()), "foobar_connector", new SelectedRole(SelectedRole.Type.ROLE, Optional.of("role")))); 
assertEquals(context.getIdentity().getExtraCredentials(), ImmutableMap.of("test.token.foo", "bar", "test.token.abc", "xyz")); } @Test(expectedExceptions = WebApplicationException.class) public void testPreparedStatementsHeaderDoesNotParse() { HttpServletRequest request = new MockHttpServletRequest( ImmutableListMultimap.<String, String>builder() .put(PRESTO_USER, "testUser") .put(PRESTO_SOURCE, "testSource") .put(PRESTO_CATALOG, "testCatalog") .put(PRESTO_SCHEMA, "testSchema") .put(PRESTO_LANGUAGE, "zh-TW") .put(PRESTO_TIME_ZONE, "Asia/Taipei") .put(PRESTO_CLIENT_INFO, "null") .put(PRESTO_PREPARED_STATEMENT, "query1=abcdefg") .build(), "testRemote"); new HttpRequestSessionContext(request); } }
apache-2.0
vori1987/java_pft2
addressbook-web-tests/src/test/java/ru/stqa/pft/addressbook/appmenager/ApplicationManager.java
3078
package ru.stqa.pft.addressbook.appmenager; import org.openqa.selenium.*; import org.openqa.selenium.chrome.ChromeDriver; import org.openqa.selenium.firefox.FirefoxDriver; import org.openqa.selenium.firefox.FirefoxOptions; import org.openqa.selenium.ie.InternetExplorerDriver; import org.openqa.selenium.remote.BrowserType; import org.openqa.selenium.remote.DesiredCapabilities; import org.openqa.selenium.remote.RemoteWebDriver; import java.io.File; import java.io.FileReader; import java.io.IOException; import java.net.URL; import java.util.Objects; import java.util.Properties; import java.util.concurrent.TimeUnit; import static java.lang.String.format; public class ApplicationManager { private final Properties properties; WebDriver wd; private SessionHelper sessionHelper; private NavigationHelper navigationHelper; private GroupHelper groupHelper; private ContactHelper contactHelper; private String browser; private DbHelper dbHelper; public ApplicationManager(String browser) { this.browser = browser; properties = new Properties(); } public void init() throws IOException, InterruptedException { String target = System.getProperty("target", "local"); properties.load(new FileReader(new File(String.format("src/test/resources/%s.properties", target)))); dbHelper = new DbHelper(); if ("".equals(properties.getProperty("selenium.server"))) { if (browser.equals(BrowserType.FIREFOX)) { wd = new FirefoxDriver(new FirefoxOptions().setLegacy(true)); } else if (browser.equals(BrowserType.CHROME)) { wd = new ChromeDriver(); } else if (browser.equals(BrowserType.IE)) { wd = new InternetExplorerDriver(); } } else { DesiredCapabilities capabilities = new DesiredCapabilities(); capabilities.setBrowserName(browser); capabilities.setPlatform(Platform.fromString(System.getProperty("platform", "Win7"))); wd = new RemoteWebDriver(new URL(properties.getProperty("selenium.server")), capabilities); } wd.manage().timeouts().implicitlyWait(0, TimeUnit.SECONDS); 
wd.get(properties.getProperty("web.baseUrl")); // Thread.sleep(5000); groupHelper = new GroupHelper(wd); contactHelper = new ContactHelper(wd); navigationHelper = new NavigationHelper(wd); sessionHelper = new SessionHelper(wd); sessionHelper.login(properties.getProperty("web.adminLogin"), properties.getProperty("web.adminPassword")); } public void stop() { wd.quit(); } public byte[] takeScreenshot(){ return ((TakesScreenshot) wd).getScreenshotAs(OutputType.BYTES); } public GroupHelper group() { return groupHelper; } public ContactHelper contact() { return contactHelper; } public NavigationHelper goTo() { return navigationHelper; } public DbHelper db() { return dbHelper; } }
apache-2.0
lamfire/chimaera
src/main/java/com/lamfire/chimaera/annotation/SERVICE.java
404
package com.lamfire.chimaera.annotation; import java.lang.annotation.*; /** * Created with IntelliJ IDEA. * User: admin * Date: 13-11-8 * Time: 下午10:56 * To change this template use File | Settings | File Templates. */ @Documented @Inherited @Retention(RetentionPolicy.RUNTIME) @Target({ElementType.TYPE}) public @interface SERVICE { public abstract String command(); }
apache-2.0
ebayopensource/turmeric-runtime
codegen/codegen-tools/src/main/java/org/ebayopensource/turmeric/tools/codegen/external/wsdl/parser/schema/SimpleType.java
3064
/******************************************************************************* * Copyright (c) 2006-2010 eBay Inc. All Rights Reserved. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 *******************************************************************************/ package org.ebayopensource.turmeric.tools.codegen.external.wsdl.parser.schema; import java.io.Serializable; import javax.xml.namespace.QName; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.w3c.dom.NodeList; /** * A class to represent a &lt;simpleType&gt; element in a schema * * @author Owen Burroughs <owenb@apache.org> */ public class SimpleType extends SchemaType implements Serializable { static final long serialVersionUID = 1L; private String name = ""; private QName typeName = null; private SimpleTypeRestriction restriction = null; private SimpleTypeList list = null; /** * Constructor * @param el The dom element for this simpleType */ SimpleType(Element el, String tns) { super(el, tns); typeName = getAttributeQName(el, "name", tns); processOtherElements(el, tns); // If the element has no name, we cannot map it. Don't do any more processing // of this type if (typeName == null) return; name = typeName.getLocalPart(); } SimpleType(Element el, String tns, QName typeName) { super(el, tns); this.typeName = typeName; processOtherElements(el, tns); // If the element has no name, we cannot map it. 
Don't do any more processing // of this type if (typeName == null) return; name = typeName.getLocalPart(); } public void processOtherElements(Element el, String tns){ NodeList children = el.getChildNodes(); for (int i = 0; i < children.getLength(); i++) { Node child = children.item(i); if (child.getNodeType() == Node.ELEMENT_NODE) { Element subEl = (Element) child; String elType = subEl.getLocalName(); if (elType.equals("list") ) { list = new SimpleTypeList(subEl, tns); }else if(elType.equals("restriction") ) { restriction = new SimpleTypeRestriction(subEl, tns); } } } } public String getName() { return name; } /** * @see SchemaType#isComplex() */ public boolean isComplex() { return false; } /** * @see SchemaType#getTypeName() */ public QName getTypeName() { return typeName; } /** * @see SchemaType#isSimple() */ public boolean isSimple() { return true; } public SimpleTypeList getList(){ return list; } public SimpleTypeRestriction getRestriction(){ return restriction; } }
apache-2.0
semagrow/semagrow
core/src/main/java/org/semagrow/evaluation/LoggingTupleQueryResultHandler.java
4032
package org.semagrow.evaluation; import org.eclipse.rdf4j.model.impl.SimpleValueFactory; import org.semagrow.evaluation.file.MaterializationManager; import org.semagrow.evaluation.file.MaterializationHandle; import org.semagrow.evaluation.file.QueryResultHandlerWrapper; import org.semagrow.evaluation.util.LoggingUtil; import org.semagrow.querylog.api.QueryLogException; import org.semagrow.querylog.api.QueryLogHandler; import org.semagrow.querylog.api.QueryLogRecord; import org.semagrow.querylog.impl.QueryLogRecordImpl; import org.eclipse.rdf4j.model.IRI; import org.eclipse.rdf4j.query.*; import org.eclipse.rdf4j.query.impl.EmptyBindingSet; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.net.URL; import java.util.List; import java.util.UUID; /** * Created by antru on 6/10/2015. */ public class LoggingTupleQueryResultHandler extends QueryResultHandlerWrapper implements TupleQueryResultHandler { private static final Logger logger = LoggerFactory.getLogger(LoggingTupleQueryResultHandler.class); private MaterializationManager mat; private MaterializationHandle handle; private QueryLogHandler qfrHandler; private String query; private UUID uuid; private int count; private URL endpoint; private long start; private long end; private QueryLogRecord queryLogRecord; public LoggingTupleQueryResultHandler(String q, QueryResultHandler handler, QueryLogHandler qfrHandler, MaterializationManager mat, URL endpoint) { super(handler); this.mat = mat; this.qfrHandler = qfrHandler; this.endpoint = endpoint; this.query = q; this.uuid = UUID.randomUUID(); } @Override public void startQueryResult(List<String> list) throws TupleQueryResultHandlerException { count = 0; start = System.currentTimeMillis(); if (handle != null) { queryLogRecord = createMetadata(SimpleValueFactory.getInstance().createIRI(endpoint.toString()), query, EmptyBindingSet.getInstance(), list); try { handle = mat.saveResult(); } catch (QueryEvaluationException e) { 
logger.error("Error while creating a materialization handle", e); } handle.startQueryResult(list); } super.startQueryResult(list); } @Override public void endQueryResult() throws TupleQueryResultHandlerException { LoggingUtil.logEnd(logger, query, endpoint, count); if (handle != null) { handle.endQueryResult(); end = System.currentTimeMillis(); queryLogRecord.setCardinality(count); queryLogRecord.setDuration(start, end); if (queryLogRecord.getCardinality() == 0) { try { handle.destroy(); } catch (IOException e) { logger.error("Error while destroying a materialization handle", e); } } else { queryLogRecord.setResults(handle.getId()); } try { qfrHandler.handleQueryRecord(queryLogRecord); } catch (QueryLogException e) { logger.error("Error while pushing record to queryloghandler", e); } } super.endQueryResult(); } @Override public void handleSolution(BindingSet bindingSet) throws TupleQueryResultHandlerException { count++; if (count == 1) { LoggingUtil.logFirstResult(logger, query, endpoint); } LoggingUtil.logResult(logger, query, endpoint, bindingSet); if (handle != null) { handle.handleSolution(bindingSet); } super.handleSolution(bindingSet); } protected QueryLogRecordImpl createMetadata(IRI endpoint, String expr, BindingSet bindings, List<String> bindingNames) { return new QueryLogRecordImpl(uuid, endpoint, expr, bindings, bindingNames); } }
apache-2.0
saeg/baduino
bundles/br.usp.each.saeg.baduino.ui/src/main/java/org/objectweb/asm/tree/ModuleProvideNode.java
2702
/***
 * ASM: a very small and fast Java bytecode manipulation framework
 * Copyright (c) 2000-2011 INRIA, France Telecom
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of the copyright holders nor the names of its
 *    contributors may be used to endorse or promote products derived from
 *    this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 * THE POSSIBILITY OF SUCH DAMAGE.
 */
package org.objectweb.asm.tree;

import org.objectweb.asm.ModuleVisitor;

/**
 * A node that represents a service and its implementation provided by the current module.
 * Mirrors a {@code provides} directive in a {@code module-info} class.
 * Fields are public and mutable by design, following the ASM tree API convention.
 *
 * @author Remi Forax
 */
public class ModuleProvideNode {

    /**
     * The service name (in its internal form).
     */
    public String service;

    /**
     * The implementation name (in its internal form).
     */
    public String impl;

    /**
     * Constructs a new {@link ModuleProvideNode}.
     *
     * @param service
     *            the service name (in its internal form).
     * @param impl
     *            the implementation name (in its internal form).
     */
    public ModuleProvideNode(final String service, final String impl) {
        this.service = service;
        this.impl = impl;
    }

    /**
     * Makes the given module visitor visit this require declaration.
     *
     * @param mv
     *            a module visitor.
     */
    public void accept(final ModuleVisitor mv) {
        mv.visitProvide(service, impl);
    }
}
apache-2.0
EXXETA/sonar-esql-plugin
esql-plugin/src/test/java/com/exxeta/iss/sonar/esql/EsqlPluginTest.java
2491
/* * Sonar ESQL Plugin * Copyright (C) 2013-2022 Thomas Pohl and EXXETA AG * http://www.exxeta.com * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.exxeta.iss.sonar.esql; import java.util.ArrayList; import java.util.List; import org.junit.jupiter.api.Test; import org.sonar.api.Plugin; import org.sonar.api.SonarEdition; import org.sonar.api.SonarQubeSide; import org.sonar.api.SonarRuntime; import org.sonar.api.config.PropertyDefinition; import org.sonar.api.internal.SonarRuntimeImpl; import org.sonar.api.utils.Version; import static org.assertj.core.api.Assertions.assertThat; class EsqlPluginTest { @Test void should_contain_right_properties_number() throws Exception { assertThat(properties()).hasSize(4); } @Test void should_have_Esql_as_category_for_properties() throws Exception { List<PropertyDefinition> properties = properties(); assertThat(properties).isNotEmpty(); for (PropertyDefinition propertyDefinition : properties) { assertThat(propertyDefinition.category()).isEqualTo("Esql"); } } @Test void count_extensions() throws Exception { Plugin.Context context = setupContext(SonarRuntimeImpl.forSonarQube(Version.create(7, 9), SonarQubeSide.SERVER, SonarEdition.COMMUNITY)); assertThat(context.getExtensions()).hasSize(10); } private List<PropertyDefinition> properties() { List<PropertyDefinition> propertiesList = new ArrayList<>(); List extensions = setupContext(SonarRuntimeImpl.forSonarQube(Version.create(7, 9), SonarQubeSide.SERVER, 
SonarEdition.COMMUNITY)).getExtensions(); for (Object extension : extensions) { if (extension instanceof PropertyDefinition) { propertiesList.add((PropertyDefinition) extension); } } return propertiesList; } private Plugin.Context setupContext(SonarRuntime runtime) { Plugin.Context context = new Plugin.Context(runtime); new EsqlPlugin().define(context); return context; } }
apache-2.0
TYKYTeam/AndroidBase
appbase/src/main/java/net/liang/appbaselibrary/base/BaseFragment.java
3728
package net.liang.appbaselibrary.base;

import android.content.Intent;
import android.databinding.DataBindingUtil;
import android.databinding.ViewDataBinding;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;

import com.socks.library.KLog;

import net.liang.appbaselibrary.base.mvp.MvpPresenter;
import net.liang.appbaselibrary.base.mvp.MvpView;
import net.liang.appbaselibrary.utils.NetworkUtils;
import net.liang.appbaselibrary.utils.ToastUtils;
import net.liang.appbaselibrary.widget.dialog.DialogHelper;

import org.greenrobot.eventbus.EventBus;
import org.greenrobot.eventbus.Subscribe;

import butterknife.ButterKnife;

/**
 * Base class for all MVP fragments: inflates the layout via data binding,
 * optionally binds ButterKnife, wires the presenter to the fragment lifecycle
 * (subscribe on resume / unsubscribe on pause) and registers with EventBus
 * for the fragment's lifetime.
 *
 * Created by lianghuiyong@outlook.com on 2016/6/22.
 */
public abstract class BaseFragment extends Fragment implements BaseViewInterface, MvpView {

    // Data binding for the layout returned by getLayoutId(); assigned in onCreateView.
    private ViewDataBinding binding;

    /** The presenter driven by this fragment's lifecycle; may return null for passive screens. */
    protected abstract MvpPresenter getPresenter();

    /** Layout resource to inflate as this fragment's view. */
    protected abstract int getLayoutId();

    // NOTE(review): never assigned in this class, yet dismissed in onPause —
    // presumably set by a subclass; confirm against users of this base class.
    private DialogHelper dialogHelper;

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        binding = DataBindingUtil.inflate(inflater, getLayoutId(), container, false);
        return getView();
    }

    @Override
    public void onViewCreated(View view, Bundle savedInstanceState) {
        super.onViewCreated(view, savedInstanceState);
        if (isUseButterKnife()){
            ButterKnife.bind(this, getView());
        }
        // Fixed hook order for subclasses: recycler view, then general init, then tabs.
        initRecyclerView();
        init();
        initTabs();
    }

    /** Override to return false in subclasses that do not use ButterKnife. */
    @Override
    public boolean isUseButterKnife() {
        return true;
    }

    @Override
    public void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Registered for the whole fragment lifetime; unregistered in onDestroy.
        EventBus.getDefault().register(this);
    }

    // EventBus requires at least one @Subscribe method on a registered object;
    // subclasses override this (or add their own) to receive events.
    @Subscribe
    public void onEvent(String string) {

    }

    /** Optional hook called from onViewCreated; default does nothing. */
    @Override
    public void init() {

    }

    /** Optional hook called from onViewCreated; default does nothing. */
    @Override
    public void initRecyclerView() {

    }

    /** Optional hook called from onViewCreated; default does nothing. */
    @Override
    public void initTabs() {

    }

    @Override
    public void showNetworkFail() {
        // Distinguish "request failed" from "no connectivity" for the user message.
        if (NetworkUtils.isConnected(getContext())){
            showToast("加载失败!");
        }else {
            showToast("网络不给力,请检查网络设置!");
        }
    }

    @Override
    public void showNetworkFail(String err) {
        showToast(err);
    }

    @Override
    public void showToast(String toast) {
        ToastUtils.showToast( toast);
    }

    protected ViewDataBinding getBinding() {
        return binding;
    }

    // NOTE(review): overrides Fragment.getView() to return the binding root;
    // calling this before onCreateView assigns the binding would NPE.
    @Override
    public View getView() {
        return binding.getRoot();
    }

    @Override
    public void onResume() {
        super.onResume();
        if (getPresenter() != null) {
            getPresenter().subscribe();
        }
    }

    @Override
    public void onPause() {
        super.onPause();
        if (getPresenter() != null) {
            getPresenter().unSubscribe();
        }
        if (dialogHelper != null) {
            dialogHelper.dismissProgressDialog();
        }
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        EventBus.getDefault().unregister(this);
    }

    /** Starts the given activity with no extras. */
    public void nextActivity(Class<?> cls) {
        Intent intent = new Intent(getContext(), cls);
        startActivity(intent);
    }

    /** Starts the given activity, attaching the bundle as extras when non-null. */
    public void nextActivity(Class<?> cls, Bundle bundle) {
        Intent intent = new Intent();
        intent.setClass(getContext(), cls);
        if (bundle != null) {
            intent.putExtras(bundle);
        }
        startActivity(intent);
    }
}
apache-2.0
vert-x3/vertx-codegen
src/test/java/io/vertx/test/codegen/testapi/MethodWithNotVertxGenObjectInHandler.java
294
package io.vertx.test.codegen.testapi;

import io.vertx.core.Handler;
import io.vertx.codegen.annotations.VertxGen;

/**
 * Codegen test fixture: a {@code @VertxGen} interface declaring a method whose
 * {@link Handler} type parameter ({@code NonVertxGenInterface}) is not itself a
 * {@code @VertxGen} type — presumably used to assert that code generation
 * rejects such signatures (verify against the corresponding generator test).
 *
 * @author <a href="http://tfox.org">Tim Fox</a>
 */
@VertxGen
public interface MethodWithNotVertxGenObjectInHandler {

  void foo(Handler<NonVertxGenInterface> handler);
}
apache-2.0
avafanasiev/groovy
subprojects/groovy-console/src/main/groovy/groovy/ui/ConsoleTextEditor.java
10802
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package groovy.ui; import groovy.ui.text.GroovyFilter; import groovy.ui.text.StructuredSyntaxResources; import groovy.ui.text.TextEditor; import groovy.ui.text.TextUndoManager; import org.codehaus.groovy.runtime.StringGroovyMethods; import javax.swing.*; import javax.swing.event.DocumentEvent; import javax.swing.event.DocumentListener; import javax.swing.text.BadLocationException; import javax.swing.text.DefaultStyledDocument; import javax.swing.text.Document; import java.awt.*; import java.awt.datatransfer.DataFlavor; import java.awt.datatransfer.Transferable; import java.awt.event.ActionEvent; import java.awt.event.InputEvent; import java.awt.event.KeyEvent; import java.awt.print.PrinterJob; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.util.prefs.Preferences; /** * Component which provides a styled editor for the console. 
* * @author hippy * @author Danno Ferrin * @author Tim Yates * @author Guillaume Laforge */ public class ConsoleTextEditor extends JScrollPane { public String getDefaultFamily() { return defaultFamily; } public void setDefaultFamily(String defaultFamily) { this.defaultFamily = defaultFamily; } private class LineNumbersPanel extends JPanel { public LineNumbersPanel() { int initialSize = 3 * Preferences.userNodeForPackage(Console.class).getInt("fontSize", 12); setMinimumSize(new Dimension(initialSize, initialSize)); setPreferredSize(new Dimension(initialSize, initialSize)); } @Override public void paintComponent(Graphics g) { super.paintComponent(g); // starting position in document int start = textEditor.viewToModel(getViewport().getViewPosition()); // end position in document int end = textEditor.viewToModel(new Point(10, getViewport().getViewPosition().y + (int) textEditor.getVisibleRect().getHeight()) ); // translate offsets to lines Document doc = textEditor.getDocument(); int startline = doc.getDefaultRootElement().getElementIndex(start) + 1; int endline = doc.getDefaultRootElement().getElementIndex(end) + 1; Font f = textEditor.getFont(); int fontHeight = g.getFontMetrics(f).getHeight(); int fontDesc = g.getFontMetrics(f).getDescent(); int startingY = -1; try { startingY = textEditor.modelToView(start).y + fontHeight - fontDesc; } catch (BadLocationException e1) { System.err.println(e1.getMessage()); } g.setFont(f); for (int line = startline, y = startingY; line <= endline; y += fontHeight, line++) { String lineNumber = StringGroovyMethods.padLeft(Integer.toString(line), 4, " "); g.drawString(lineNumber, 0, y); } } } private String defaultFamily = "Monospaced"; private static final PrinterJob PRINTER_JOB = PrinterJob.getPrinterJob(); private LineNumbersPanel numbersPanel = new LineNumbersPanel(); private boolean documentChangedSinceLastRepaint = false; private TextEditor textEditor = new TextEditor(true, true, true) { public void paintComponent(Graphics g) { 
super.paintComponent(g); // only repaint the line numbers in the gutter when the document has changed // in case lines (hence line numbers) have been added or removed from the document if (documentChangedSinceLastRepaint) { numbersPanel.repaint(); documentChangedSinceLastRepaint = false; } } }; private UndoAction undoAction = new UndoAction(); private RedoAction redoAction = new RedoAction(); private PrintAction printAction = new PrintAction(); private boolean editable = true; private TextUndoManager undoManager; /** * Creates a new instance of ConsoleTextEditor */ public ConsoleTextEditor() { textEditor.setFont(new Font(defaultFamily, Font.PLAIN, Preferences.userNodeForPackage(Console.class).getInt("fontSize", 12))); setViewportView(new JPanel(new BorderLayout()) {{ add(numbersPanel, BorderLayout.WEST); add(textEditor, BorderLayout.CENTER); }}); textEditor.setDragEnabled(editable); getVerticalScrollBar().setUnitIncrement(10); initActions(); DefaultStyledDocument doc = new DefaultStyledDocument(); doc.setDocumentFilter(new GroovyFilter(doc)); textEditor.setDocument(doc); // add a document listener, to hint whether the line number gutter has to be repainted // when the number of lines changes doc.addDocumentListener(new DocumentListener() { public void insertUpdate(DocumentEvent documentEvent) { documentChangedSinceLastRepaint = true; } public void removeUpdate(DocumentEvent documentEvent) { documentChangedSinceLastRepaint = true; } public void changedUpdate(DocumentEvent documentEvent) { documentChangedSinceLastRepaint = true; int width = 3 * Preferences.userNodeForPackage(Console.class).getInt("fontSize", 12); numbersPanel.setPreferredSize(new Dimension(width, width)); } }); // create and add the undo/redo manager this.undoManager = new TextUndoManager(); doc.addUndoableEditListener(undoManager); // add the undo actions undoManager.addPropertyChangeListener(undoAction); undoManager.addPropertyChangeListener(redoAction); doc.addDocumentListener(undoAction); 
doc.addDocumentListener(redoAction); InputMap im = textEditor.getInputMap(JComponent.WHEN_IN_FOCUSED_WINDOW); KeyStroke ks = KeyStroke.getKeyStroke(KeyEvent.VK_Z, InputEvent.CTRL_MASK, false); im.put(ks, StructuredSyntaxResources.UNDO); ActionMap am = textEditor.getActionMap(); am.put(StructuredSyntaxResources.UNDO, undoAction); ks = KeyStroke.getKeyStroke(KeyEvent.VK_Y, InputEvent.CTRL_MASK, false); im.put(ks, StructuredSyntaxResources.REDO); am.put(StructuredSyntaxResources.REDO, redoAction); ks = KeyStroke.getKeyStroke(KeyEvent.VK_P, InputEvent.CTRL_MASK, false); im.put(ks, StructuredSyntaxResources.PRINT); am.put(StructuredSyntaxResources.PRINT, printAction); } public void setShowLineNumbers(boolean showLineNumbers) { if (showLineNumbers) { setViewportView(new JPanel(new BorderLayout()) {{ add(numbersPanel, BorderLayout.WEST); add(textEditor, BorderLayout.CENTER); }}); } else { setViewportView(textEditor); } } public void setEditable(boolean editable) { textEditor.setEditable(editable); } public boolean clipBoardAvailable() { Transferable t = StructuredSyntaxResources.SYSTEM_CLIPBOARD.getContents(this); return t.isDataFlavorSupported(DataFlavor.stringFlavor); } public TextEditor getTextEditor() { return textEditor; } protected void initActions() { ActionMap map = getActionMap(); PrintAction printAction = new PrintAction(); map.put(StructuredSyntaxResources.PRINT, printAction); } private class PrintAction extends AbstractAction { public PrintAction() { setEnabled(true); } public void actionPerformed(ActionEvent ae) { PRINTER_JOB.setPageable(textEditor); try { if (PRINTER_JOB.printDialog()) { PRINTER_JOB.print(); } } catch (Exception e) { e.printStackTrace(); } } } // end ConsoleTextEditor.PrintAction private class RedoAction extends UpdateCaretListener implements PropertyChangeListener { public RedoAction() { setEnabled(false); } public void actionPerformed(ActionEvent ae) { undoManager.redo(); setEnabled(undoManager.canRedo()); 
undoAction.setEnabled(undoManager.canUndo()); super.actionPerformed(ae); } public void propertyChange(PropertyChangeEvent pce) { setEnabled(undoManager.canRedo()); } } // end ConsoleTextEditor.RedoAction private abstract class UpdateCaretListener extends AbstractAction implements DocumentListener { protected int lastUpdate; public void changedUpdate(DocumentEvent de) { } public void insertUpdate(DocumentEvent de) { lastUpdate = de.getOffset() + de.getLength(); } public void removeUpdate(DocumentEvent de) { lastUpdate = de.getOffset(); } public void actionPerformed(ActionEvent ae) { textEditor.setCaretPosition(lastUpdate); } } private class UndoAction extends UpdateCaretListener implements PropertyChangeListener { public UndoAction() { setEnabled(false); } public void actionPerformed(ActionEvent ae) { undoManager.undo(); setEnabled(undoManager.canUndo()); redoAction.setEnabled(undoManager.canRedo()); super.actionPerformed(ae); } public void propertyChange(PropertyChangeEvent pce) { setEnabled(undoManager.canUndo()); } } public Action getUndoAction() { return undoAction; } public Action getRedoAction() { return redoAction; } public Action getPrintAction() { return printAction; } }
apache-2.0
linkedin/WhereHows
metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClient.java
9287
package com.linkedin.entity.client; import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.data.template.RecordTemplate; import com.linkedin.data.template.StringArray; import com.linkedin.entity.Entity; import com.linkedin.entity.EntityResponse; import com.linkedin.metadata.aspect.EnvelopedAspect; import com.linkedin.metadata.aspect.VersionedAspect; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; import com.linkedin.metadata.query.ListResult; import com.linkedin.metadata.query.ListUrnsResult; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.mxe.SystemMetadata; import com.linkedin.r2.RemoteInvocationException; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import javax.annotation.Nonnull; import javax.annotation.Nullable; public interface EntityClient { @Nonnull public Entity get(@Nonnull final Urn urn, @Nonnull final Authentication authentication) throws RemoteInvocationException; @Nonnull public Map<Urn, EntityResponse> batchGetV2( @Nonnull String entityName, @Nonnull final Set<Urn> urns, @Nullable final Set<String> aspectNames, @Nonnull final Authentication authentication) throws Exception; @Nonnull public Map<Urn, Entity> batchGet(@Nonnull final Set<Urn> urns, @Nonnull final Authentication authentication) throws RemoteInvocationException; /** * Gets browse snapshot of a given path * * @param query search query * @param field field of the dataset * @param requestFilters autocomplete filters * @param limit max number of autocomplete results * @throws RemoteInvocationException */ @Nonnull public AutoCompleteResult autoComplete(@Nonnull String entityType, @Nonnull String query, @Nonnull Map<String, String> 
requestFilters, @Nonnull int limit, @Nullable String field, @Nonnull Authentication authentication) throws RemoteInvocationException; /** * Gets browse snapshot of a given path * * @param query search query * @param requestFilters autocomplete filters * @param limit max number of autocomplete results * @throws RemoteInvocationException */ @Nonnull public AutoCompleteResult autoComplete(@Nonnull String entityType, @Nonnull String query, @Nonnull Map<String, String> requestFilters, @Nonnull int limit, @Nonnull Authentication authentication) throws RemoteInvocationException; /** * Gets browse snapshot of a given path * * @param entityType entity type being browse * @param path path being browsed * @param requestFilters browse filters * @param start start offset of first dataset * @param limit max number of datasets * @throws RemoteInvocationException */ @Nonnull public BrowseResult browse(@Nonnull String entityType, @Nonnull String path, @Nullable Map<String, String> requestFilters, int start, int limit, @Nonnull Authentication authentication) throws RemoteInvocationException; public void update(@Nonnull final Entity entity, @Nonnull final Authentication authentication) throws RemoteInvocationException; public void updateWithSystemMetadata(@Nonnull final Entity entity, @Nullable final SystemMetadata systemMetadata, @Nonnull final Authentication authentication) throws RemoteInvocationException; public void batchUpdate(@Nonnull final Set<Entity> entities, @Nonnull final Authentication authentication) throws RemoteInvocationException; /** * Searches for entities matching to a given query and filters * * @param input search query * @param requestFilters search filters * @param start start offset for search results * @param count max number of search results requested * @return a set of search results * @throws RemoteInvocationException */ @Nonnull public SearchResult search(@Nonnull String entity, @Nonnull String input, @Nullable Map<String, String> requestFilters, int 
start, int count, @Nonnull Authentication authentication) throws RemoteInvocationException; /** * Filters for entities matching to a given query and filters * * @param requestFilters search filters * @param start start offset for search results * @param count max number of search results requested * @return a set of list results * @throws RemoteInvocationException */ @Nonnull public ListResult list(@Nonnull String entity, @Nullable Map<String, String> requestFilters, int start, int count, @Nonnull Authentication authentication) throws RemoteInvocationException; /** * Searches for datasets matching to a given query and filters * * @param input search query * @param filter search filters * @param start start offset for search results * @param count max number of search results requested * @return Snapshot key * @throws RemoteInvocationException */ @Nonnull public SearchResult search(@Nonnull String entity, @Nonnull String input, @Nullable Filter filter, int start, int count, @Nonnull Authentication authentication) throws RemoteInvocationException; /** * Searches for entities matching to a given query and filters across multiple entity types * * @param entities entity types to search (if empty, searches all entities) * @param input search query * @param filter search filters * @param start start offset for search results * @param count max number of search results requested * @return Snapshot key * @throws RemoteInvocationException */ @Nonnull public SearchResult searchAcrossEntities(@Nonnull List<String> entities, @Nonnull String input, @Nullable Filter filter, int start, int count, @Nonnull Authentication authentication) throws RemoteInvocationException; /** * Gets browse path(s) given dataset urn * * @param urn urn for the entity * @return list of paths given urn * @throws RemoteInvocationException */ @Nonnull public StringArray getBrowsePaths(@Nonnull Urn urn, @Nonnull Authentication authentication) throws RemoteInvocationException; public void setWritable(boolean 
canWrite, @Nonnull Authentication authentication) throws RemoteInvocationException; @Nonnull public long getTotalEntityCount(@Nonnull String entityName, @Nonnull Authentication authentication) throws RemoteInvocationException; @Nonnull public Map<String, Long> batchGetTotalEntityCount(@Nonnull List<String> entityName, @Nonnull Authentication authentication) throws RemoteInvocationException; /** * List all urns existing for a particular Entity type. */ public ListUrnsResult listUrns(@Nonnull final String entityName, final int start, final int count, @Nonnull final Authentication authentication) throws RemoteInvocationException; /** * Hard delete an entity with a particular urn. */ public void deleteEntity(@Nonnull final Urn urn, @Nonnull final Authentication authentication) throws RemoteInvocationException; /** * Filters entities based on a particular Filter and Sort criterion * * @param entity filter entity * @param filter search filters * @param sortCriterion sort criterion * @param start start offset for search results * @param count max number of search results requested * @return a set of {@link SearchResult}s * @throws RemoteInvocationException */ @Nonnull public SearchResult filter(@Nonnull String entity, @Nonnull Filter filter, @Nullable SortCriterion sortCriterion, int start, int count, @Nonnull Authentication authentication) throws RemoteInvocationException; @Nullable public VersionedAspect getAspect(@Nonnull String urn, @Nonnull String aspect, @Nonnull Long version, @Nonnull Authentication authentication) throws RemoteInvocationException; @Nullable public VersionedAspect getAspectOrNull(@Nonnull String urn, @Nonnull String aspect, @Nonnull Long version, @Nonnull Authentication authentication) throws RemoteInvocationException; public List<EnvelopedAspect> getTimeseriesAspectValues(@Nonnull String urn, @Nonnull String entity, @Nonnull String aspect, @Nullable Long startTimeMillis, @Nullable Long endTimeMillis, @Nullable Integer limit, @Nonnull Boolean 
getLatestValue, @Nullable Filter filter, @Nonnull Authentication authentication) throws RemoteInvocationException; public String ingestProposal(@Nonnull final MetadataChangeProposal metadataChangeProposal, @Nonnull final Authentication authentication) throws RemoteInvocationException; @Nonnull public <T extends RecordTemplate> Optional<T> getVersionedAspect(@Nonnull String urn, @Nonnull String aspect, @Nonnull Long version, @Nonnull Class<T> aspectClass, @Nonnull Authentication authentication) throws RemoteInvocationException; public DataMap getRawAspect(@Nonnull String urn, @Nonnull String aspect, @Nonnull Long version, @Nonnull Authentication authentication) throws RemoteInvocationException; }
apache-2.0
lemonJun/Emmet
src/main/java/com/takin/emmet/file/DiskUtil.java
2904
package com.takin.emmet.file;

import java.io.File;
import java.net.URL;
import java.net.URLDecoder;

import com.takin.emmet.string.StringUtil;

/**
 * Utility class for disk path operations: resolving the user's home
 * directory, turning relative/classpath paths into absolute ones, and
 * normalizing paths (tilde expansion plus URL decoding).
 *
 * @author howsun(zjh@58.com)
 * @Date 2010-10-22
 * @version v0.1
 */
public abstract class DiskUtil {

    /**
     * @return the absolute path of the current user's home directory
     */
    public static String home() {
        return System.getProperty("user.home");
    }

    /**
     * @param path
     *            a path relative to the user's home directory
     * @return the full path under the user's home directory
     */
    public static String home(String path) {
        return home() + path;
    }

    /**
     * Resolves a path to an absolute path using the default ClassLoader and
     * default encoding.
     *
     * @param path
     *            the path to resolve
     * @return the absolute path, or null if it cannot be resolved
     */
    public static String absolute(String path) {
        return absolute(path, FileUtil.class.getClassLoader(), CodingUtil.getDefaultEncoding());
    }

    /**
     * Resolves a path to an absolute path. If the path does not exist on the
     * file system it is looked up as a classpath resource via the given
     * ClassLoader, the thread-context ClassLoader, and the system ClassLoader,
     * in that order.
     *
     * @param path
     *            the path to resolve
     * @param klassLoader
     *            the ClassLoader consulted for classpath lookups
     * @param enc
     *            the encoding used to URL-decode the path
     * @return the absolute path, or null if it cannot be resolved
     */
    public static String absolute(String path, ClassLoader klassLoader, String enc) {
        path = normalize(path, enc);
        if (StringUtil.isEmpty(path))
            return null;

        File f = new File(path);
        if (!f.exists()) {
            // Not on disk: fall back to classpath resource lookup.
            URL url = klassLoader.getResource(path);
            if (null == url)
                url = Thread.currentThread().getContextClassLoader().getResource(path);
            if (null == url)
                url = ClassLoader.getSystemResource(path);
            if (null != url)
                return normalize(url.getPath(), CodingUtil.UTF8);// paths obtained from a URL are always decoded as UTF-8
            return null;
        }
        return path;
    }

    /**
     * Normalizes a path using the default encoding, replacing a leading
     * {@code ~} with the user's home directory.
     *
     * @param path
     *            the path to normalize
     * @return the normalized path, or null on failure
     */
    public static String normalize(String path) {
        return normalize(path, CodingUtil.getDefaultEncoding());
    }

    /**
     * Normalizes a path: replaces a leading {@code ~} with the user's home
     * directory and URL-decodes the result with the given encoding.
     *
     * @param path
     *            the path to normalize
     * @param enc
     *            the encoding used for URL decoding
     * @return the normalized path, or null when the input is empty or
     *         decoding fails (failures are deliberately swallowed)
     */
    public static String normalize(String path, String enc) {
        if (StringUtil.isEmpty(path))
            return null;
        if (path.charAt(0) == '~')
            path = DiskUtil.home() + path.substring(1);
        try {
            return URLDecoder.decode(path, enc);
        } catch (Exception e) {
            // Invalid encoding or malformed escape: treat as unresolvable.
            return null;
        }
    }
}
apache-2.0
michael-simons/spring-boot
spring-boot-project/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/data/cassandra/CassandraRepositoriesAutoConfigurationTests.java
5271
/*
 * Copyright 2012-2020 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.boot.autoconfigure.data.cassandra;

import java.util.Set;

import com.datastax.oss.driver.api.core.CqlSession;
import com.datastax.oss.driver.api.core.CqlSessionBuilder;
import org.junit.jupiter.api.Test;

import org.springframework.boot.autoconfigure.AutoConfigurations;
import org.springframework.boot.autoconfigure.TestAutoConfigurationPackage;
import org.springframework.boot.autoconfigure.cassandra.CassandraAutoConfiguration;
import org.springframework.boot.autoconfigure.context.PropertyPlaceholderAutoConfiguration;
import org.springframework.boot.autoconfigure.data.alt.cassandra.CityCassandraRepository;
import org.springframework.boot.autoconfigure.data.cassandra.city.City;
import org.springframework.boot.autoconfigure.data.cassandra.city.CityRepository;
import org.springframework.boot.autoconfigure.data.empty.EmptyDataPackage;
import org.springframework.boot.test.context.assertj.AssertableApplicationContext;
import org.springframework.boot.test.context.runner.ApplicationContextRunner;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.data.cassandra.core.mapping.CassandraMappingContext;
import org.springframework.data.cassandra.repository.config.EnableCassandraRepositories;
import org.springframework.test.util.ReflectionTestUtils;

import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;

/**
 * Tests for {@link CassandraRepositoriesAutoConfiguration}.
 *
 * @author Eddú Meléndez
 * @author Mark Paluch
 * @author Stephane Nicoll
 */
class CassandraRepositoriesAutoConfigurationTests {

	// Shared runner with the full Cassandra auto-configuration chain; each test
	// adds its own user configuration (and properties) on top.
	private final ApplicationContextRunner contextRunner = new ApplicationContextRunner().withConfiguration(
			AutoConfigurations.of(CassandraAutoConfiguration.class, CassandraRepositoriesAutoConfiguration.class,
					CassandraDataAutoConfiguration.class, PropertyPlaceholderAutoConfiguration.class));

	// Scanning the City package should register its repository and one entity.
	@Test
	void testDefaultRepositoryConfiguration() {
		this.contextRunner.withUserConfiguration(DefaultConfiguration.class).run((context) -> {
			assertThat(context).hasSingleBean(CityRepository.class);
			assertThat(context).hasSingleBean(CqlSessionBuilder.class);
			assertThat(getInitialEntitySet(context)).hasSize(1);
		});
	}

	// An empty package yields no repositories and no initial entities.
	@Test
	void testNoRepositoryConfiguration() {
		this.contextRunner.withUserConfiguration(EmptyConfiguration.class).run((context) -> {
			assertThat(context).hasSingleBean(CqlSessionBuilder.class);
			assertThat(getInitialEntitySet(context)).isEmpty();
		});
	}

	// An explicit @EnableCassandraRepositories wins over package scanning.
	@Test
	void doesNotTriggerDefaultRepositoryDetectionIfCustomized() {
		this.contextRunner.withUserConfiguration(CustomizedConfiguration.class).run((context) -> {
			assertThat(context).hasSingleBean(CityCassandraRepository.class);
			assertThat(getInitialEntitySet(context)).hasSize(1).containsOnly(City.class);
		});
	}

	// repositories.type=reactive must suppress the imperative repository.
	@Test
	void enablingReactiveRepositoriesDisablesImperativeRepositories() {
		this.contextRunner.withUserConfiguration(DefaultConfiguration.class)
				.withPropertyValues("spring.data.cassandra.repositories.type=reactive")
				.run((context) -> assertThat(context).doesNotHaveBean(CityCassandraRepository.class));
	}

	// repositories.type=none must suppress the imperative repository.
	@Test
	void enablingNoRepositoriesDisablesImperativeRepositories() {
		this.contextRunner.withUserConfiguration(DefaultConfiguration.class)
				.withPropertyValues("spring.data.cassandra.repositories.type=none")
				.run((context) -> assertThat(context).doesNotHaveBean(CityCassandraRepository.class));
	}

	// Reads the mapping context's private "initialEntitySet" field via
	// reflection — there is no public accessor for it.
	@SuppressWarnings("unchecked")
	private Set<Class<?>> getInitialEntitySet(AssertableApplicationContext context) {
		CassandraMappingContext mappingContext = context.getBean(CassandraMappingContext.class);
		return (Set<Class<?>>) ReflectionTestUtils.getField(mappingContext, "initialEntitySet");
	}

	// Provides a mocked CqlSession so no real Cassandra instance is needed.
	@Configuration(proxyBeanMethods = false)
	static class TestConfiguration {

		@Bean
		CqlSession cqlSession() {
			return mock(CqlSession.class);
		}

	}

	// Entity scanning anchored at a package with no entities.
	@Configuration(proxyBeanMethods = false)
	@TestAutoConfigurationPackage(EmptyDataPackage.class)
	@Import(TestConfiguration.class)
	static class EmptyConfiguration {

	}

	// Entity scanning anchored at the City entity's package.
	@Configuration(proxyBeanMethods = false)
	@TestAutoConfigurationPackage(City.class)
	@Import(TestConfiguration.class)
	static class DefaultConfiguration {

	}

	// Explicit repository configuration pointing at the "alt" repository.
	@Configuration(proxyBeanMethods = false)
	@TestAutoConfigurationPackage(CassandraRepositoriesAutoConfigurationTests.class)
	@EnableCassandraRepositories(basePackageClasses = CityCassandraRepository.class)
	@Import(TestConfiguration.class)
	static class CustomizedConfiguration {

	}

}
apache-2.0
indigo-dc/orchestrator
src/main/java/it/reply/orchestrator/config/properties/ToscaProperties.java
1211
/*
 * Copyright © 2015-2021 I.N.F.N.
 * Copyright © 2015-2020 Santer Reply S.p.A.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package it.reply.orchestrator.config.properties;

import javax.validation.constraints.NotNull;

import lombok.Data;
import lombok.NoArgsConstructor;

import org.checkerframework.checker.nullness.qual.NonNull;

import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.core.io.Resource;
import org.springframework.validation.annotation.Validated;

/**
 * Type-safe binding of the {@code tosca.*} configuration properties.
 *
 * {@code @Validated} makes Spring run bean validation on binding, so a missing
 * {@code tosca.definitions-folder} fails startup via {@code @NotNull}.
 * {@code @NoArgsConstructor} supplies the no-arg constructor property binding
 * requires; {@code @Data} generates the accessors.
 */
@Validated
@Data
@ConfigurationProperties(prefix = "tosca")
@NoArgsConstructor
public class ToscaProperties {

  // Folder containing the TOSCA definitions to load.
  // @NotNull: enforced at binding time (bean validation).
  // @NonNull: Checker Framework static nullness marker — presumably also
  // picked up by Lombok for generated accessors; TODO confirm.
  @NotNull
  @NonNull
  private Resource definitionsFolder;

}
apache-2.0
leachbj/java-mqlight
mqlight/src/test/java/com/ibm/mqlight/api/impl/logging/logback/TestClientIdConverter.java
3526
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package com.ibm.mqlight.api.impl.logging.logback; import static org.junit.Assert.assertEquals; import org.junit.Test; import org.slf4j.MDC; import com.ibm.mqlight.api.impl.logging.LogMarker; import com.ibm.mqlight.api.logging.Logger; public class TestClientIdConverter { @Test public void testConvertWithClientIdNotSet() { MDC.clear(); final ClientIdConverter converter = new ClientIdConverter(); assertEquals("Unexpected convertion", "*", converter.convert(new MockILoggingEvent())); assertEquals("Unexpected convertion", "*", converter.convert(new MockILoggingEvent(null, LogMarker.ENTRY.getValue(), "message", new Object [] {}))); assertEquals("Unexpected convertion", "*", converter.convert(new MockILoggingEvent(null, LogMarker.EXIT.getValue(), "message", new Object [] {}))); MockILoggingEvent event = new MockILoggingEvent(); event.setNullMDC(true); assertEquals("Unexpected convertion", "*", converter.convert(new MockILoggingEvent())); } @Test public void testConvertWithClientIdSetToNull() { MDC.put(Logger.CLIENTID_KEY, null); final ClientIdConverter converter = new ClientIdConverter(); assertEquals("Unexpected convertion", "*", converter.convert(new MockILoggingEvent())); assertEquals("Unexpected 
convertion", "*", converter.convert(new MockILoggingEvent(null, LogMarker.ENTRY.getValue(), "message", new Object [] {}))); assertEquals("Unexpected convertion", "*", converter.convert(new MockILoggingEvent(null, LogMarker.EXIT.getValue(), "message", new Object [] {}))); } @Test public void testConvertWithClientIdSetBlank() { MDC.put(Logger.CLIENTID_KEY, ""); final ClientIdConverter converter = new ClientIdConverter(); assertEquals("Unexpected convertion", "", converter.convert(new MockILoggingEvent())); assertEquals("Unexpected convertion", "", converter.convert(new MockILoggingEvent(null, LogMarker.ENTRY.getValue(), "message", new Object [] {}))); assertEquals("Unexpected convertion", "", converter.convert(new MockILoggingEvent(null, LogMarker.EXIT.getValue(), "message", new Object [] {}))); } @Test public void testConvertWithClientIdSet() { MDC.put(Logger.CLIENTID_KEY, "id"); final ClientIdConverter converter = new ClientIdConverter(); assertEquals("Unexpected convertion", "id", converter.convert(new MockILoggingEvent())); assertEquals("Unexpected convertion", "id", converter.convert(new MockILoggingEvent(null, LogMarker.ENTRY.getValue(), "message", new Object [] {}))); assertEquals("Unexpected convertion", "id", converter.convert(new MockILoggingEvent(null, LogMarker.EXIT.getValue(), "message", new Object [] {}))); } }
apache-2.0
Uni-Sol/batik
sources/org/apache/batik/anim/AnimationEngine.java
21091
/* Copyright 2006 The Apache Software Foundation Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.batik.anim; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import org.apache.batik.anim.timing.TimedDocumentRoot; import org.apache.batik.anim.timing.TimedElement; import org.apache.batik.anim.timing.TimegraphListener; import org.apache.batik.anim.values.AnimatableValue; import org.apache.batik.dom.util.DoublyIndexedTable; import org.w3c.dom.Document; /** * An abstract base class for managing animation in a document. * * @author <a href="mailto:cam%40mcc%2eid%2eau">Cameron McCormack</a> * @version $Id$ */ public abstract class AnimationEngine { // Constants to identify the type of animation. public static final short ANIM_TYPE_XML = 0; public static final short ANIM_TYPE_CSS = 1; public static final short ANIM_TYPE_OTHER = 2; /** * The document this AnimationEngine is managing animation for. */ protected Document document; /** * The root time container for the document. */ protected TimedDocumentRoot timedDocumentRoot; /** * Map of AnimationTargets to TargetInfo objects. */ protected HashMap targets = new HashMap(); /** * Map of AbstractAnimations to AnimationInfo objects. */ protected HashMap animations = new HashMap(); /** * The listener object for animation target base value changes. */ protected Listener targetListener = new Listener(); /** * Creates a new AnimationEngine for the given document. 
*/ public AnimationEngine(Document doc) { this.document = doc; timedDocumentRoot = createDocumentRoot(); } /** * Adds an animation to the document. * @param target the target element of the animation * @param type the type of animation (must be one of the * <code>ANIM_TYPE_*</code> constants defined in this class * @param ns the namespace URI of the attribute being animated, if * <code>type == </code>{@link #ANIM_TYPE_XML} * @param an the attribute name if <code>type == </code>{@link * #ANIM_TYPE_XML}, the property name if <code>type == </code> * {@link #ANIM_TYPE_CSS}, and the animation type otherwise * @param anim the animation */ public void addAnimation(AnimationTarget target, short type, String ns, String an, AbstractAnimation anim) { org.apache.batik.anim.timing.Trace.enter(this, "addAnimation", new Object[] { target, new Short[type], ns, an, anim } ); try { timedDocumentRoot.addChild(anim.getTimedElement()); AnimationInfo animInfo = getAnimationInfo(anim); animInfo.type = type; animInfo.attributeNamespaceURI = ns; animInfo.attributeLocalName = an; animInfo.target = target; animations.put(anim, animInfo); Sandwich sandwich = getSandwich(target, type, ns, an); if (sandwich.animation == null) { anim.lowerAnimation = null; anim.higherAnimation = null; } else { sandwich.animation.higherAnimation = anim; anim.lowerAnimation = sandwich.animation; anim.higherAnimation = null; } sandwich.animation = anim; if (anim.lowerAnimation == null) { sandwich.lowestAnimation = anim; } } finally { org.apache.batik.anim.timing.Trace.exit(); } } /** * Removes an animation from the document. 
*/
    public void removeAnimation(AbstractAnimation anim) {
        org.apache.batik.anim.timing.Trace.enter(this, "removeAnimation",
                new Object[] { anim } );
        try {
            timedDocumentRoot.removeChild(anim.getTimedElement());
            // The animation above this one must recompute its composed value
            // now that a lower layer of the sandwich is going away.
            AbstractAnimation nextHigher = anim.higherAnimation;
            if (nextHigher != null) {
                nextHigher.markDirty();
            }
            moveToBottom(anim);
            if (anim.higherAnimation != null) {
                anim.higherAnimation.lowerAnimation = null;
            }
            AnimationInfo animInfo = getAnimationInfo(anim);
            // Fixed: removed a stray second ';' (empty statement) here.
            Sandwich sandwich = getSandwich(animInfo.target, animInfo.type,
                    animInfo.attributeNamespaceURI, animInfo.attributeLocalName);
            if (sandwich.animation == anim) {
                // The sandwich is now empty.
                sandwich.animation = null;
                sandwich.lowestAnimation = null;
                sandwich.shouldUpdate = true;
            }
        } finally {
            org.apache.batik.anim.timing.Trace.exit();
        }
    }

    /**
     * Returns the Sandwich for the given animation type/attribute, creating it
     * on demand.
     */
    protected Sandwich getSandwich(AnimationTarget target, short type,
                                   String ns, String an) {
        TargetInfo info = getTargetInfo(target);
        Sandwich sandwich;
        if (type == ANIM_TYPE_XML) {
            sandwich = (Sandwich) info.xmlAnimations.get(ns, an);
            if (sandwich == null) {
                sandwich = new Sandwich();
                info.xmlAnimations.put(ns, an, sandwich);
            }
        } else if (type == ANIM_TYPE_CSS) {
            sandwich = (Sandwich) info.cssAnimations.get(an);
            if (sandwich == null) {
                sandwich = new Sandwich();
                info.cssAnimations.put(an, sandwich);
            }
        } else {
            sandwich = (Sandwich) info.otherAnimations.get(an);
            if (sandwich == null) {
                sandwich = new Sandwich();
                info.otherAnimations.put(an, sandwich);
            }
        }
        return sandwich;
    }

    /**
     * Returns the TargetInfo for the given AnimationTarget, creating it on
     * demand.
     */
    protected TargetInfo getTargetInfo(AnimationTarget target) {
        TargetInfo info = (TargetInfo) targets.get(target);
        if (info == null) {
            info = new TargetInfo();
            targets.put(target, info);
        }
        return info;
    }

    /**
     * Returns the AnimationInfo for the given AbstractAnimation.
*/
    protected AnimationInfo getAnimationInfo(AbstractAnimation anim) {
        // Lazily creates the bookkeeping record for an animation.
        AnimationInfo info = (AnimationInfo) animations.get(anim);
        if (info == null) {
            info = new AnimationInfo();
            animations.put(anim, info);
        }
        return info;
    }

    /**
     * Updates the animations in the document to the given document time.
     */
    protected void tick(float time) {
        timedDocumentRoot.seekTo(time);
        Iterator i = targets.entrySet().iterator();
        while (i.hasNext()) {
            Map.Entry e = (Map.Entry) i.next();
            AnimationTarget target = (AnimationTarget) e.getKey();
            TargetInfo info = (TargetInfo) e.getValue();

            // Update the XML animations.
            Iterator j = info.xmlAnimations.iterator();
            while (j.hasNext()) {
                DoublyIndexedTable.Entry e2 = (DoublyIndexedTable.Entry) j.next();
                String namespaceURI = (String) e2.getKey1();
                String localName = (String) e2.getKey2();
                Sandwich sandwich = (Sandwich) e2.getValue();
                // Only recompute sandwiches whose composed value may have changed.
                if (sandwich.shouldUpdate || sandwich.animation.isDirty) {
                    AnimatableValue av = sandwich.animation.getComposedValue();
                    boolean usesUnderlying =
                        sandwich.lowestAnimation.usesUnderlyingValue();
                    // Keep the base-value listener registration in sync with
                    // whether the sandwich still depends on the underlying value.
                    if (usesUnderlying && !sandwich.listenerRegistered) {
                        target.addTargetListener(namespaceURI, localName, false,
                                                 targetListener);
                        sandwich.listenerRegistered = true;
                    } else if (!usesUnderlying && sandwich.listenerRegistered) {
                        target.removeTargetListener(namespaceURI, localName, false,
                                                    targetListener);
                        sandwich.listenerRegistered = false;
                    }
                    target.updateAttributeValue(namespaceURI, localName, av);
                    sandwich.shouldUpdate = false;
                    sandwich.animation.isDirty = false;
                }
            }

            // Update the CSS animations.
            j = info.cssAnimations.entrySet().iterator();
            while (j.hasNext()) {
                Map.Entry e2 = (Map.Entry) j.next();
                String propertyName = (String) e2.getKey();
                Sandwich sandwich = (Sandwich) e2.getValue();
                if (sandwich.shouldUpdate || sandwich.animation.isDirty) {
                    AnimatableValue av = sandwich.animation.getComposedValue();
                    boolean usesUnderlying =
                        sandwich.lowestAnimation.usesUnderlyingValue();
                    if (usesUnderlying && !sandwich.listenerRegistered) {
                        target.addTargetListener(null, propertyName, true,
                                                 targetListener);
                        sandwich.listenerRegistered = true;
                    } else if (!usesUnderlying && sandwich.listenerRegistered) {
                        target.removeTargetListener(null, propertyName, true,
                                                    targetListener);
                        sandwich.listenerRegistered = false;
                    }
                    // When the underlying value is used, clear the override
                    // first so composition starts from the base value.
                    if (usesUnderlying) {
                        target.updatePropertyValue(propertyName, null);
                    }
                    if (!(usesUnderlying && av == null)) {
                        target.updatePropertyValue(propertyName, av);
                    }
                    sandwich.shouldUpdate = false;
                    sandwich.animation.isDirty = false;
                }
            }

            // Update the other animations.
            j = info.otherAnimations.entrySet().iterator();
            while (j.hasNext()) {
                Map.Entry e2 = (Map.Entry) j.next();
                String type = (String) e2.getKey();
                Sandwich sandwich = (Sandwich) e2.getValue();
                if (sandwich.shouldUpdate || sandwich.animation.isDirty) {
                    AnimatableValue av = sandwich.animation.getComposedValue();
                    target.updateOtherValue(type, av);
                    sandwich.shouldUpdate = false;
                    sandwich.animation.isDirty = false;
                }
            }
        }
    }

    /**
     * Invoked to indicate an animation became active at the specified time.
     *
     * @param anim the animation
     * @param begin the time the element became active, in document simple time
     */
    public void toActive(AbstractAnimation anim, float begin) {
        moveToTop(anim);
        anim.isActive = true;
        anim.beginTime = begin;
        anim.isFrozen = false;
        // Move the animation down, in case it began at the same time as another
        // animation in the sandwich and it's earlier in document order.
        pushDown(anim);
        anim.markDirty();
    }

    /**
     * Moves the animation down the sandwich such that it is in the right
     * position according to begin time and document order.
     */
    protected void pushDown(AbstractAnimation anim) {
        TimedElement e = anim.getTimedElement();
        AbstractAnimation top = null;
        boolean moved = false;
        // Swap 'anim' with its lower neighbour while that neighbour is active
        // (or frozen) and should sit above it: it began later, or began at the
        // same time but comes later in document order.
        while (anim.lowerAnimation != null
                && (anim.lowerAnimation.isActive || anim.lowerAnimation.isFrozen)
                && (anim.lowerAnimation.beginTime > anim.beginTime
                    || anim.lowerAnimation.beginTime == anim.beginTime
                        && e.isBefore(anim.lowerAnimation.getTimedElement()))) {
            AbstractAnimation higher = anim.higherAnimation;
            AbstractAnimation lower = anim.lowerAnimation;
            AbstractAnimation lowerLower = lower.lowerAnimation;
            if (higher != null) {
                higher.lowerAnimation = lower;
            }
            if (lowerLower != null) {
                lowerLower.higherAnimation = anim;
            }
            lower.lowerAnimation = anim;
            lower.higherAnimation = higher;
            anim.lowerAnimation = lowerLower;
            anim.higherAnimation = lower;
            if (!moved) {
                // Remember the animation that took over the original position;
                // it becomes the new sandwich top if 'anim' was the top.
                top = lower;
                moved = true;
            }
        }
        if (moved) {
            // Fix up the sandwich's top/bottom pointers after the swaps.
            AnimationInfo animInfo = getAnimationInfo(anim);
            Sandwich sandwich = getSandwich(animInfo.target, animInfo.type,
                                            animInfo.attributeNamespaceURI,
                                            animInfo.attributeLocalName);
            if (sandwich.animation == anim) {
                sandwich.animation = top;
            }
            if (anim.lowerAnimation == null) {
                sandwich.lowestAnimation = anim;
            }
        }
    }

    /**
     * Invoked to indicate that this timed element became inactive.
     *
     * @param anim the animation
     * @param isFrozen whether the element is frozen or not
     */
    public void toInactive(AbstractAnimation anim, boolean isFrozen) {
        anim.isActive = false;
        anim.isFrozen = isFrozen;
        if (!isFrozen) {
            anim.value = null;
        }
        anim.markDirty();
        if (!isFrozen) {
            // Non-frozen inactive animations no longer contribute a value;
            // sink them so they don't shadow the animations still active.
            moveToBottom(anim);
        }
    }

    /**
     * Invoked to indicate that this timed element has had its fill removed.
     */
    public void removeFill(AbstractAnimation anim) {
        anim.isActive = false;
        anim.isFrozen = false;
        anim.value = null;
        anim.markDirty();
        moveToBottom(anim);
    }

    /**
     * Moves the given animation to the top of the sandwich.
*/
    protected void moveToTop(AbstractAnimation anim) {
        AnimationInfo animInfo = getAnimationInfo(anim);
        Sandwich sandwich = getSandwich(animInfo.target, animInfo.type,
                                        animInfo.attributeNamespaceURI,
                                        animInfo.attributeLocalName);
        sandwich.shouldUpdate = true;
        if (anim.higherAnimation == null) {
            // Already at the top.
            return;
        }
        // Unlink 'anim' from its current position...
        if (anim.lowerAnimation == null) {
            sandwich.lowestAnimation = anim.higherAnimation;
        } else {
            anim.lowerAnimation.higherAnimation = anim.higherAnimation;
        }
        anim.higherAnimation.lowerAnimation = anim.lowerAnimation;
        // ...and relink it above the current top animation.
        if (sandwich.animation != null) {
            sandwich.animation.higherAnimation = anim;
        }
        anim.lowerAnimation = sandwich.animation;
        anim.higherAnimation = null;
        sandwich.animation = anim;
    }

    /**
     * Moves the given animation to the bottom of the sandwich.
     */
    protected void moveToBottom(AbstractAnimation anim) {
        if (anim.lowerAnimation == null) {
            // Already at the bottom.
            return;
        }
        AnimationInfo animInfo = getAnimationInfo(anim);
        Sandwich sandwich = getSandwich(animInfo.target, animInfo.type,
                                        animInfo.attributeNamespaceURI,
                                        animInfo.attributeLocalName);
        AbstractAnimation nextLower = anim.lowerAnimation;
        // The animation taking over this position must recompute its composed
        // value.
        nextLower.markDirty();
        // Unlink 'anim' from its current position...
        anim.lowerAnimation.higherAnimation = anim.higherAnimation;
        if (anim.higherAnimation != null) {
            anim.higherAnimation.lowerAnimation = anim.lowerAnimation;
        } else {
            // 'anim' was the top of the sandwich; promote its lower neighbour.
            sandwich.animation = nextLower;
            sandwich.shouldUpdate = true;
        }
        // ...and relink it below the current lowest animation.
        sandwich.lowestAnimation.lowerAnimation = anim;
        anim.higherAnimation = sandwich.lowestAnimation;
        anim.lowerAnimation = null;
        sandwich.lowestAnimation = anim;
        if (sandwich.animation.isDirty) {
            sandwich.shouldUpdate = true;
        }
    }

    /**
     * Adds a {@link TimegraphListener} to the document.
     */
    public void addTimegraphListener(TimegraphListener l) {
        timedDocumentRoot.addTimegraphListener(l);
    }

    /**
     * Removes a {@link TimegraphListener} from the document.
     */
    public void removeTimegraphListener(TimegraphListener l) {
        timedDocumentRoot.removeTimegraphListener(l);
    }

    /**
     * Invoked to indicate that this timed element has been sampled at the given
     * time.
*
     * @param anim the animation
     * @param simpleTime the sample time in local simple time
     * @param simpleDur the simple duration of the element
     * @param repeatIteration the repeat iteration during which the element was
     *     sampled
     */
    public void sampledAt(AbstractAnimation anim, float simpleTime,
                          float simpleDur, int repeatIteration) {
        anim.sampledAt(simpleTime, simpleDur, repeatIteration);
    }

    /**
     * Invoked to indicate that this timed element has been sampled at the end
     * of its active time, at an integer multiple of the simple duration. This
     * is the "last" value that will be used for filling, which cannot be
     * sampled normally.
     */
    public void sampledLastValue(AbstractAnimation anim, int repeatIteration) {
        anim.sampledLastValue(repeatIteration);
    }

    /**
     * Creates and returns a new TimedDocumentRoot object for the document.
     */
    protected abstract TimedDocumentRoot createDocumentRoot();

    /**
     * Listener class for changes to base values on a target element.
     */
    protected class Listener implements AnimationTargetListener {

        /**
         * Invoked to indicate that the base value of the specified attribute
         * or property has changed.
         */
        public void baseValueChanged(AnimationTarget t, String ns, String ln,
                                     boolean isCSS) {
            short type = isCSS ? ANIM_TYPE_CSS : ANIM_TYPE_XML;
            Sandwich sandwich = getSandwich(t, type, ns, ln);
            sandwich.shouldUpdate = true;
            // Walk to the bottom of the sandwich and mark it dirty, since the
            // underlying value it composes with has changed.
            // NOTE(review): assumes the sandwich is non-empty when a change is
            // reported -- confirm callers never notify an empty sandwich.
            AbstractAnimation anim = sandwich.animation;
            while (anim.lowerAnimation != null) {
                anim = anim.lowerAnimation;
            }
            anim.markDirty();
        }
    }

    /**
     * Class to hold XML and CSS animations for a target element.
     */
    protected static class TargetInfo {

        /**
         * Map of XML attribute names to the corresponding {@link Sandwich}
         * objects.
         */
        public DoublyIndexedTable xmlAnimations = new DoublyIndexedTable();

        /**
         * Map of CSS attribute names to the corresponding {@link Sandwich}
         * objects.
         */
        public HashMap cssAnimations = new HashMap();

        /**
         * Map of other animation types to the corresponding {@link Sandwich}
         * objects.
         */
        public HashMap otherAnimations = new HashMap();
    }

    /**
     * Class to hold an animation sandwich for a particular attribute.
     */
    protected static class Sandwich {

        /**
         * The top-most animation in the sandwich.
         */
        public AbstractAnimation animation;

        /**
         * The bottom-most animation in the sandwich.
         */
        public AbstractAnimation lowestAnimation;

        /**
         * Whether the animation needs to have its value copied into the
         * document.
         */
        public boolean shouldUpdate;

        /**
         * Whether an {@link AnimationTargetListener} has been registered to
         * listen for changes to the base value.
         */
        public boolean listenerRegistered;
    }

    /**
     * Class to hold target information of an animation.
     */
    protected static class AnimationInfo {

        /**
         * The target of the animation.
         */
        public AnimationTarget target;

        /**
         * The type of animation. Must be one of the <code>ANIM_TYPE_*</code>
         * constants defined in {@link AnimationEngine}.
         */
        public short type;

        /**
         * The namespace URI of the attribute to animate, if this is an XML
         * attribute animation.
         */
        public String attributeNamespaceURI;

        /**
         * The local name of the attribute or the name of the CSS property to
         * animate.
         */
        public String attributeLocalName;
    }
}
apache-2.0
RalphSu/algo-pratice
src/main/java/btree/Btree.java
211
/**
 * B-tree practice package.
 */
package btree;

/**
 * Placeholder for a B-tree implementation exercise.
 * <p>
 * If you get stuck, refer to the Linux kernel's B-tree implementation:
 * https://github.com/torvalds/linux/blob/39caa0916ef27cf1da5026eb708a2b8413156f75/lib/btree.c
 *
 * @author ralph
 */
public class Btree {

}
apache-2.0
rohanpatel2602/okhttp
okhttp/src/main/java/com/squareup/okhttp/ConnectionPool.java
9833
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.squareup.okhttp; import com.squareup.okhttp.internal.Platform; import com.squareup.okhttp.internal.Util; import java.net.SocketException; import java.util.ArrayList; import java.util.LinkedList; import java.util.List; import java.util.ListIterator; import java.util.concurrent.ExecutorService; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; /** * Manages reuse of HTTP and SPDY connections for reduced network latency. HTTP * requests that share the same {@link com.squareup.okhttp.Address} may share a * {@link com.squareup.okhttp.Connection}. This class implements the policy of * which connections to keep open for future use. * * <p>The {@link #getDefault() system-wide default} uses system properties for * tuning parameters: * <ul> * <li>{@code http.keepAlive} true if HTTP and SPDY connections should be * pooled at all. Default is true. * <li>{@code http.maxConnections} maximum number of idle connections to * each to keep in the pool. Default is 5. * <li>{@code http.keepAliveDuration} Time in milliseconds to keep the * connection alive in the pool before closing it. Default is 5 minutes. 
* This property isn't used by {@code HttpURLConnection}.
 * </ul>
 *
 * <p>The default instance <i>doesn't</i> adjust its configuration as system
 * properties are changed. This assumes that the applications that set these
 * parameters do so before making HTTP connections, and that this class is
 * initialized lazily.
 */
public final class ConnectionPool {
  // Upper bound on dead/expired connections removed per cleanup pass, so each
  // pass stays short.
  private static final int MAX_CONNECTIONS_TO_CLEANUP = 2;
  private static final long DEFAULT_KEEP_ALIVE_DURATION_MS = 5 * 60 * 1000; // 5 min

  private static final ConnectionPool systemDefault;

  // The system-wide default pool is configured once from system properties;
  // later property changes are not observed.
  static {
    String keepAlive = System.getProperty("http.keepAlive");
    String keepAliveDuration = System.getProperty("http.keepAliveDuration");
    String maxIdleConnections = System.getProperty("http.maxConnections");
    long keepAliveDurationMs = keepAliveDuration != null
        ? Long.parseLong(keepAliveDuration)
        : DEFAULT_KEEP_ALIVE_DURATION_MS;
    if (keepAlive != null && !Boolean.parseBoolean(keepAlive)) {
      // Keep-alive disabled: a capacity of 0 means nothing is pooled.
      systemDefault = new ConnectionPool(0, keepAliveDurationMs);
    } else if (maxIdleConnections != null) {
      systemDefault = new ConnectionPool(Integer.parseInt(maxIdleConnections), keepAliveDurationMs);
    } else {
      systemDefault = new ConnectionPool(5, keepAliveDurationMs);
    }
  }

  /** The maximum number of idle connections for each address. */
  private final int maxIdleConnections;
  // Keep-alive duration in nanoseconds, for comparison against nanoTime().
  private final long keepAliveDurationNs;

  // Most recently used connections at the front; cleanup iterates from the
  // back (least recently used first).
  private final LinkedList<Connection> connections = new LinkedList<Connection>();

  /** We use a single background thread to cleanup expired connections.
 */
  private final ExecutorService executorService = new ThreadPoolExecutor(0, 1,
      60L, TimeUnit.SECONDS, new LinkedBlockingQueue<Runnable>(),
      Util.threadFactory("OkHttp ConnectionPool", true));

  // Removes dead/expired connections, then trims idle connections above the
  // configured maximum.  Sockets are closed outside the pool lock.
  private final Runnable connectionsCleanupRunnable = new Runnable() {
    @Override public void run() {
      List<Connection> expiredConnections = new ArrayList<Connection>(MAX_CONNECTIONS_TO_CLEANUP);
      int idleConnectionCount = 0;
      synchronized (ConnectionPool.this) {
        // First pass (oldest first): collect dead or expired connections,
        // counting the idle ones that remain.
        for (ListIterator<Connection> i = connections.listIterator(connections.size());
            i.hasPrevious(); ) {
          Connection connection = i.previous();
          if (!connection.isAlive() || connection.isExpired(keepAliveDurationNs)) {
            i.remove();
            expiredConnections.add(connection);
            if (expiredConnections.size() == MAX_CONNECTIONS_TO_CLEANUP) break;
          } else if (connection.isIdle()) {
            idleConnectionCount++;
          }
        }

        // Second pass: evict idle connections beyond the per-pool limit.
        for (ListIterator<Connection> i = connections.listIterator(connections.size());
            i.hasPrevious() && idleConnectionCount > maxIdleConnections; ) {
          Connection connection = i.previous();
          if (connection.isIdle()) {
            expiredConnections.add(connection);
            i.remove();
            --idleConnectionCount;
          }
        }
      }

      // Close the evicted sockets without holding the pool lock.
      for (Connection expiredConnection : expiredConnections) {
        Util.closeQuietly(expiredConnection.getSocket());
      }
    }
  };

  public ConnectionPool(int maxIdleConnections, long keepAliveDurationMs) {
    this.maxIdleConnections = maxIdleConnections;
    // Convert milliseconds to nanoseconds.
    this.keepAliveDurationNs = keepAliveDurationMs * 1000 * 1000;
  }

  /**
   * Returns a snapshot of the connections in this pool, ordered from newest to
   * oldest. Waits for the cleanup callable to run if it is currently scheduled.
   */
  List<Connection> getConnections() {
    waitForCleanupCallableToRun();
    synchronized (this) {
      return new ArrayList<Connection>(connections);
    }
  }

  /**
   * Blocks until the executor service has processed all currently enqueued
   * jobs.
*/ private void waitForCleanupCallableToRun() { try { executorService.submit(new Runnable() { @Override public void run() { } }).get(); } catch (Exception e) { throw new AssertionError(); } } public static ConnectionPool getDefault() { return systemDefault; } /** Returns total number of connections in the pool. */ public synchronized int getConnectionCount() { return connections.size(); } /** Returns total number of spdy connections in the pool. */ public synchronized int getSpdyConnectionCount() { int total = 0; for (Connection connection : connections) { if (connection.isSpdy()) total++; } return total; } /** Returns total number of http connections in the pool. */ public synchronized int getHttpConnectionCount() { int total = 0; for (Connection connection : connections) { if (!connection.isSpdy()) total++; } return total; } /** Returns a recycled connection to {@code address}, or null if no such connection exists. */ public synchronized Connection get(Address address) { Connection foundConnection = null; for (ListIterator<Connection> i = connections.listIterator(connections.size()); i.hasPrevious(); ) { Connection connection = i.previous(); if (!connection.getRoute().getAddress().equals(address) || !connection.isAlive() || System.nanoTime() - connection.getIdleStartTimeNs() >= keepAliveDurationNs) { continue; } i.remove(); if (!connection.isSpdy()) { try { Platform.get().tagSocket(connection.getSocket()); } catch (SocketException e) { Util.closeQuietly(connection.getSocket()); // When unable to tag, skip recycling and close Platform.get().logW("Unable to tagSocket(): " + e); continue; } } foundConnection = connection; break; } if (foundConnection != null && foundConnection.isSpdy()) { connections.addFirst(foundConnection); // Add it back after iteration. } executorService.execute(connectionsCleanupRunnable); return foundConnection; } /** * Gives {@code connection} to the pool. The pool may store the connection, * or close it, as its policy describes. 
*
   * <p>It is an error to use {@code connection} after calling this method.
   */
  void recycle(Connection connection) {
    if (connection.isSpdy()) {
      // SPDY connections are multiplexed and are shared via share(), never
      // recycled through this path.
      return;
    }

    if (!connection.clearOwner()) {
      return; // This connection isn't eligible for reuse.
    }

    if (!connection.isAlive()) {
      Util.closeQuietly(connection.getSocket());
      return;
    }

    try {
      Platform.get().untagSocket(connection.getSocket());
    } catch (SocketException e) {
      // When unable to remove tagging, skip recycling and close.
      Platform.get().logW("Unable to untagSocket(): " + e);
      Util.closeQuietly(connection.getSocket());
      return;
    }

    synchronized (this) {
      connections.addFirst(connection);
      connection.incrementRecycleCount();
      connection.resetIdleStartTime();
    }

    executorService.execute(connectionsCleanupRunnable);
  }

  /**
   * Shares the SPDY connection with the pool. Callers to this method may
   * continue to use {@code connection}.
   */
  void share(Connection connection) {
    if (!connection.isSpdy()) throw new IllegalArgumentException();
    executorService.execute(connectionsCleanupRunnable);
    if (connection.isAlive()) {
      synchronized (this) {
        connections.addFirst(connection);
      }
    }
  }

  /** Close and remove all connections in the pool. */
  public void evictAll() {
    List<Connection> connections;
    synchronized (this) {
      // Snapshot and clear under the lock; close sockets outside it.
      connections = new ArrayList<Connection>(this.connections);
      this.connections.clear();
    }

    for (int i = 0, size = connections.size(); i < size; i++) {
      Util.closeQuietly(connections.get(i).getSocket());
    }
  }
}
apache-2.0
blackcathacker/kc.preclean
coeus-code/src/main/java/org/kuali/coeus/propdev/impl/location/SaveProposalSitesRule.java
1178
/*
 * Copyright 2005-2014 The Kuali Foundation
 *
 * Licensed under the Educational Community License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.opensource.org/licenses/ecl1.php
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.kuali.coeus.propdev.impl.location;

import org.kuali.coeus.propdev.impl.location.SaveProposalSitesEvent;
import org.kuali.rice.krad.rules.rule.BusinessRule;

/**
 * Business rule contract for validating the proposal sites of a proposal
 * development document when it is saved.
 */
public interface SaveProposalSitesRule extends BusinessRule {

    /**
     * A rule that checks all Proposal Sites of a proposal when saving the
     * <code>{@link org.kuali.coeus.propdev.impl.core.ProposalDevelopmentDocument}</code>.
     *
     * @param saveProposalSitesEvent the save event carrying the proposal sites to validate
     * @return true if all proposal sites pass validation, false otherwise
     */
    public boolean processSaveProposalSiteBusinessRules(SaveProposalSitesEvent saveProposalSitesEvent);
}
apache-2.0
hivemq/hivemq-spi
src/main/java/com/hivemq/spi/services/AsyncSubscriptionStore.java
9239
/* * Copyright 2014 dc-square GmbH * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hivemq.spi.services; import com.google.common.collect.Multimap; import com.google.common.util.concurrent.ListenableFuture; import com.hivemq.spi.annotations.NotNull; import com.hivemq.spi.annotations.ReadOnly; import com.hivemq.spi.message.Topic; import com.hivemq.spi.services.exception.NoSuchClientIdException; import com.hivemq.spi.services.exception.RateLimitExceededException; import com.hivemq.spi.topic.exception.InvalidTopicException; import java.util.Set; /** * The subscription store allows the management of all client subscriptions from within a plugin * * @author Lukas Brandl * @author Florian Limpöck * @since 3.1 */ public interface AsyncSubscriptionStore { /** * This method returns all subscriptions on this HiveMQ Node as a {@link com.google.common.collect.Multimap} of client identifiers and topics. * You won't receive subscriptions of connected * clients from other HiveMQ nodes if HiveMQ runs in a cluster. * <p/> * Please be aware that calling this method on HiveMQ instances with many subscriptions could have * negative performance effects. * <p/> * The returned Multimap is read-only and must not be modified. * * @return a {@link ListenableFuture} which contains a {@link com.google.common.collect.Multimap} of client identifiers and their topic subscriptions * failing with a {@link RateLimitExceededException} if the plugin service rate limit is exceeded. 
*/
    @ReadOnly
    ListenableFuture<Multimap<String, Topic>> getLocalSubscriptions();

    /**
     * Returns all MQTT client subscriber identifiers for a given topic, for this HiveMQ instance.
     * MQTT Wildcards are allowed.
     * <p/>
     * Don't pass <code>null</code> as topic. This method is lenient, so
     * it will just return an empty Set.
     * <p/>
     * The returned Set is read-only and must not be modified.
     *
     * @param topic the topic
     * @return a {@link ListenableFuture} which contains the client identifiers of all subscribers that subscribed to the topic
     *         failing with a {@link RateLimitExceededException} if the plugin service rate limit is exceeded.
     */
    @ReadOnly
    ListenableFuture<Set<String>> getLocalSubscribers(@NotNull String topic);

    /**
     * Returns all topics a client is subscribed to, on this HiveMQ instance.
     * <p/>
     * If the client does not exist, an empty Set is returned.
     * <p/>
     * Don't pass <code>null</code> as clientId. This method is lenient, so
     * it will just return an empty Set.
     * <p/>
     * The returned Set is read-only and must not be modified.
     *
     * @param clientID of the client
     * @return a {@link ListenableFuture} which contains all topics the client subscribed to
     *         failing with a {@link RateLimitExceededException} if the plugin service rate limit is exceeded.
     */
    @ReadOnly
    ListenableFuture<Set<Topic>> getLocalTopics(@NotNull String clientID);

    /**
     * This method adds a subscription for a certain client to a certain topic.
     * If HiveMQ is connected to a cluster, the subscription will be broadcast to all other Cluster Nodes.
     * <p/>
     * This method is lenient, so if the clientId or the topic
     * is <code>null</code>, nothing will happen.
     *
     * @param clientID client, which should be subscribed
     * @param topic topic to which the client should be subscribed
     * @return a {@link ListenableFuture} object that will succeed, as soon as the subscription was added by all Cluster Nodes,
     *         failing with a {@link RateLimitExceededException} if the plugin service rate limit was exceeded.
     */
    ListenableFuture<Void> addSubscription(@NotNull String clientID, @NotNull Topic topic);

    /**
     * This method adds subscriptions for a certain client to certain topics.
     * If HiveMQ is connected to a cluster, the subscription will be broadcast to all other Cluster Nodes.
     * <p>
     *
     * @param clientID client, which should be subscribed
     * @param topics topics to which the client should be subscribed
     * @return a {@link ListenableFuture} object that will succeed, as soon as the subscriptions were added by all Cluster Nodes,
     *         failing with a {@link RateLimitExceededException} if the plugin service rate limit was exceeded,
     *         failing with a {@link InvalidTopicException} if any topic is not utf-8 well-formed or empty,
     *         failing with a {@link NoSuchClientIdException} if a client does not exist.
     * <p>
     * @throws NullPointerException if clientID or topics is <code>null</code>.
     * @throws IllegalArgumentException if clientID or topics is empty.
     */
    ListenableFuture<Void> addSubscriptions(@NotNull String clientID, @NotNull Set<Topic> topics);

    /**
     * This method removes a subscription for a certain client and a certain topic.
     * If HiveMQ is connected to a cluster, the subscription will be removed by other Cluster Nodes as well.
     *
     * @param clientID client, which should get unsubscribed
     * @param topic topic from which the client should get unsubscribed
     * @return a {@link ListenableFuture} object that will succeed, as soon as the subscription was removed by all Cluster Nodes,
     *         failing with a {@link RateLimitExceededException} if the plugin service rate limit was exceeded.
     */
    ListenableFuture<Void> removeSubscription(@NotNull String clientID, @NotNull String topic);

    /**
     * This method removes subscriptions for a certain client and certain topics.
     * If HiveMQ is connected to a cluster, the subscriptions will be removed by other Cluster Nodes as well.
     * <p>
     *
     * @param clientID client, which should get unsubscribed
     * @param topics topics from which the client should get unsubscribed
     * @return a {@link ListenableFuture} object that will succeed, as soon as the subscriptions were removed by all Cluster Nodes,
     *         failing with a {@link RateLimitExceededException} if the plugin service rate limit was exceeded.
     * <p>
     * @throws NullPointerException if clientID or topics is <code>null</code>.
     * @throws IllegalArgumentException if clientID or topics is empty.
     */
    ListenableFuture<Void> removeSubscriptions(@NotNull String clientID, @NotNull Set<String> topics);

    /**
     * This method returns all subscriptions on this HiveMQ instance and all other nodes in a HiveMQ cluster,
     * as a {@link com.google.common.collect.Multimap} of client identifiers and topics.
     * <p/>
     * Please be aware that calling this method on HiveMQ instances with many subscriptions could have
     * negative performance effects.
     * <p/>
     * The returned Multimap is read-only and must not be modified.
     *
     * @return a {@link ListenableFuture} which contains a {@link com.google.common.collect.Multimap} of client identifiers and their topic subscriptions
     *         failing with a {@link RateLimitExceededException} if the plugin service rate limit was exceeded.
     */
    @ReadOnly
    ListenableFuture<Multimap<String, Topic>> getSubscriptions();

    /**
     * Returns all MQTT client subscriber identifiers for a given topic, for this HiveMQ instance and all other nodes in a HiveMQ cluster.
     * MQTT Wildcards are allowed.
     * <p/>
     * Don't pass <code>null</code> as topic. This method is lenient, so
     * it will just return an empty Set.
     * <p/>
     * The returned Set is read-only and must not be modified.
     *
     * @param topic the topic
     * @return a {@link ListenableFuture} which contains the client identifiers of all subscribers that subscribed to the topic
     *         failing with a {@link RateLimitExceededException} if the plugin service rate limit was exceeded.
     */
    @ReadOnly
    ListenableFuture<Set<String>> getSubscribers(@NotNull String topic);

    /**
     * Returns all topics a client is subscribed to, on this HiveMQ instance and all other nodes in a HiveMQ cluster.
     * <p/>
     * If the client does not exist, an empty Set is returned.
     * <p/>
     * Don't pass <code>null</code> as clientId. This method is lenient, so
     * it will just return an empty Set.
     * <p/>
     * The returned Set is read-only and must not be modified.
     *
     * @param clientID of the client
     * @return a {@link ListenableFuture} which contains all topics the client subscribed to
     *         failing with a {@link RateLimitExceededException} if the plugin service rate limit was exceeded.
     */
    @ReadOnly
    ListenableFuture<Set<Topic>> getTopics(@NotNull String clientID);
}
apache-2.0
lhzheng880828/AndroidApp
AndroidDemo/Hello/src/cn/itcast/activity/MainActivity.java
348
package cn.itcast.activity; import android.app.Activity; import android.os.Bundle; public class MainActivity extends Activity { /** Called when the activity is first created. */ @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.main); } }
apache-2.0
SSEHUB/EASyProducer
Plugins/EASy-Producer/ScenariosTest/testdata/real/QualiMaster/jun15/expected/if-gen/eu/qualimaster/families/imp/FCorrelationTwitter.java
4043
package eu.qualimaster.families.imp; import eu.qualimaster.families.inf.*; import eu.qualimaster.observables.IObservable; /** * The implementation for the algorithm family "IFCorrelationTwitter" (GEN). */ public class FCorrelationTwitter implements IFCorrelationTwitter { /** * Provides a default implementation of the data input for the {@link IFCorrelationTwitterAnalyzedStreamInput} algorithm. * Actual execution system code may also directly wrap the respective tuple concept. */ public static class IFCorrelationTwitterAnalyzedStreamInput implements IIFCorrelationTwitterAnalyzedStreamInput { private String symbolId; private long timestamp; private double value; private int volume; @Override public String getSymbolId(){ return symbolId; } @Override public void setSymbolId(String symbolId){ this.symbolId = symbolId; } @Override public long getTimestamp(){ return timestamp; } @Override public void setTimestamp(long timestamp){ this.timestamp = timestamp; } @Override public double getValue(){ return value; } @Override public void setValue(double value){ this.value = value; } @Override public int getVolume(){ return volume; } @Override public void setVolume(int volume){ this.volume = volume; } } /** * Provides a default implementation of the data input for the {@link IFCorrelationTwitterSymbolListInput} algorithm. * Actual execution system code may also directly wrap the respective tuple concept. */ public static class IFCorrelationTwitterSymbolListInput implements IIFCorrelationTwitterSymbolListInput { private java.util.List<String> allSymbols; @Override public java.util.List<String> getAllSymbols(){ return allSymbols; } @Override public void setAllSymbols(java.util.List<String> allSymbols){ this.allSymbols = allSymbols; } } /** * Provides a default implementation of the data output for the {@link IFCorrelationTwitterPairwiseTwitterOutput} algorithm. * Actual execution system code may also directly wrap the respective tuple concept. 
*/ public static class IFCorrelationTwitterPairwiseTwitterOutput implements IIFCorrelationTwitterPairwiseTwitterOutput { private String pairwiseCorrelationTwitter; @Override public String getPairwiseCorrelationTwitter(){ return pairwiseCorrelationTwitter; } @Override public void setPairwiseCorrelationTwitter(String pairwiseCorrelationTwitter){ this.pairwiseCorrelationTwitter = pairwiseCorrelationTwitter; } } /** * Calculates the actual <code>result</code> for <code>input</code>. * * @param input the algorithm input taken from the input stream * @param result the result calculated by the algorithm */ @Override public void calculate(IIFCorrelationTwitterAnalyzedStreamInput input, IIFCorrelationTwitterPairwiseTwitterOutput result){ } /** * Calculates the actual <code>result</code> for <code>input</code>. * * @param input the algorithm input taken from the input stream * @param result the result calculated by the algorithm */ @Override public void calculate(IIFCorrelationTwitterSymbolListInput input, IIFCorrelationTwitterPairwiseTwitterOutput result){ } /** * Sets the algorithm parameter "windowSize". * * @param value the new value of the algorithm parameter */ public void setParameterWindowSize(int value) { } @Override public void switchState(State arg0) { } @Override public Double getMeasurement(IObservable arg0) { return null; } }
apache-2.0
hgschmie/apache-maven-plugins
maven-shade-plugin/src/main/java/org/apache/maven/plugins/shade/mojo/ShadeMojo.java
44929
package org.apache.maven.plugins.shade.mojo; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.Writer; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.maven.artifact.Artifact; import org.apache.maven.artifact.repository.ArtifactRepository; import org.apache.maven.execution.MavenSession; import org.apache.maven.model.Dependency; import org.apache.maven.model.Exclusion; import org.apache.maven.model.Model; import org.apache.maven.plugin.AbstractMojo; import org.apache.maven.plugin.MojoExecutionException; import org.apache.maven.plugins.annotations.Component; import org.apache.maven.plugins.annotations.LifecyclePhase; import org.apache.maven.plugins.annotations.Mojo; import org.apache.maven.plugins.annotations.Parameter; import org.apache.maven.plugins.annotations.ResolutionScope; import org.apache.maven.plugins.shade.ShadeRequest; import 
org.apache.maven.plugins.shade.Shader; import org.apache.maven.plugins.shade.filter.Filter; import org.apache.maven.plugins.shade.filter.MinijarFilter; import org.apache.maven.plugins.shade.filter.SimpleFilter; import org.apache.maven.plugins.shade.pom.PomWriter; import org.apache.maven.plugins.shade.relocation.Relocator; import org.apache.maven.plugins.shade.relocation.SimpleRelocator; import org.apache.maven.plugins.shade.resource.ResourceTransformer; import org.apache.maven.project.DefaultProjectBuildingRequest; import org.apache.maven.project.MavenProject; import org.apache.maven.project.MavenProjectHelper; import org.apache.maven.project.ProjectBuilder; import org.apache.maven.project.ProjectBuildingException; import org.apache.maven.project.ProjectBuildingRequest; import org.apache.maven.project.ProjectBuildingResult; import org.apache.maven.shared.artifact.DefaultArtifactCoordinate; import org.apache.maven.shared.artifact.resolve.ArtifactResolver; import org.apache.maven.shared.artifact.resolve.ArtifactResolverException; import org.apache.maven.shared.dependency.graph.DependencyGraphBuilder; import org.apache.maven.shared.dependency.graph.DependencyGraphBuilderException; import org.apache.maven.shared.dependency.graph.DependencyNode; import org.codehaus.plexus.PlexusConstants; import org.codehaus.plexus.PlexusContainer; import org.codehaus.plexus.component.repository.exception.ComponentLookupException; import org.codehaus.plexus.context.Context; import org.codehaus.plexus.context.ContextException; import org.codehaus.plexus.personality.plexus.lifecycle.phase.Contextualizable; import org.codehaus.plexus.util.IOUtil; import org.codehaus.plexus.util.WriterFactory; /** * Mojo that performs shading delegating to the Shader component. 
* * @author Jason van Zyl * @author Mauro Talevi * @author David Blevins * @author Hiram Chirino */ // CHECKSTYLE_OFF: LineLength @Mojo( name = "shade", defaultPhase = LifecyclePhase.PACKAGE, threadSafe = true, requiresDependencyResolution = ResolutionScope.RUNTIME ) // CHECKSTYLE_ON: LineLength public class ShadeMojo extends AbstractMojo implements Contextualizable { /** * The current Maven session. */ @Parameter( defaultValue = "${session}", readonly = true, required = true ) private MavenSession session; /** * The current Maven project. */ @Parameter( defaultValue = "${project}", readonly = true, required = true ) private MavenProject project; @Component private MavenProjectHelper projectHelper; @Component( hint = "default", role = org.apache.maven.plugins.shade.Shader.class ) private Shader shader; /** * The dependency graph builder to use. */ @Component private DependencyGraphBuilder dependencyGraphBuilder; /** * ProjectBuilder, needed to create projects from the artifacts. */ @Component private ProjectBuilder projectBuilder; /** * Remote repositories which will be searched for source attachments. */ @Parameter( readonly = true, required = true, defaultValue = "${project.remoteArtifactRepositories}" ) protected List<ArtifactRepository> remoteArtifactRepositories; /** * Local maven repository. */ @Parameter( readonly = true, required = true, defaultValue = "${localRepository}" ) protected ArtifactRepository localRepository; /** * Artifact resolver, needed to download source jars for inclusion in classpath. */ @Component protected ArtifactResolver artifactResolver; /** * Artifacts to include/exclude from the final artifact. Artifacts are denoted by composite identifiers of the * general form <code>groupId:artifactId:type:classifier</code>. Since version 1.3, the wildcard characters '*' and * '?' can be used within the sub parts of those composite identifiers to do pattern matching. 
For convenience, the * syntax <code>groupId</code> is equivalent to <code>groupId:*:*:*</code>, <code>groupId:artifactId</code> is * equivalent to <code>groupId:artifactId:*:*</code> and <code>groupId:artifactId:classifier</code> is equivalent to * <code>groupId:artifactId:*:classifier</code>. For example: * * <pre> * &lt;artifactSet&gt; * &lt;includes&gt; * &lt;include&gt;org.apache.maven:*&lt;/include&gt; * &lt;/includes&gt; * &lt;excludes&gt; * &lt;exclude&gt;*:maven-core&lt;/exclude&gt; * &lt;/excludes&gt; * &lt;/artifactSet&gt; * </pre> */ @Parameter private ArtifactSet artifactSet; /** * Packages to be relocated. For example: * * <pre> * &lt;relocations&gt; * &lt;relocation&gt; * &lt;pattern&gt;org.apache&lt;/pattern&gt; * &lt;shadedPattern&gt;hidden.org.apache&lt;/shadedPattern&gt; * &lt;includes&gt; * &lt;include&gt;org.apache.maven.*&lt;/include&gt; * &lt;/includes&gt; * &lt;excludes&gt; * &lt;exclude&gt;org.apache.maven.Public*&lt;/exclude&gt; * &lt;/excludes&gt; * &lt;/relocation&gt; * &lt;/relocations&gt; * </pre> * * <em>Note:</em> Support for includes exists only since version 1.4. */ @SuppressWarnings( "MismatchedReadAndWriteOfArray" ) @Parameter private PackageRelocation[] relocations; /** * Resource transformers to be used. Please see the "Examples" section for more information on available * transformers and their configuration. */ @Parameter private ResourceTransformer[] transformers; /** * Archive Filters to be used. Allows you to specify an artifact in the form of a composite identifier as used by * {@link #artifactSet} and a set of include/exclude file patterns for filtering which contents of the archive are * added to the shaded jar. From a logical perspective, includes are processed before excludes, thus it's possible * to use an include to collect a set of files from the archive then use excludes to further reduce the set. By * default, all files are included and no files are excluded. 
If multiple filters apply to an artifact, the * intersection of the matched files will be included in the final JAR. For example: * * <pre> * &lt;filters&gt; * &lt;filter&gt; * &lt;artifact&gt;junit:junit&lt;/artifact&gt; * &lt;includes&gt; * &lt;include&gt;org/junit/**&lt;/include&gt; * &lt;/includes&gt; * &lt;excludes&gt; * &lt;exclude&gt;org/junit/experimental/**&lt;/exclude&gt; * &lt;/excludes&gt; * &lt;/filter&gt; * &lt;/filters&gt; * </pre> */ @SuppressWarnings( "MismatchedReadAndWriteOfArray" ) @Parameter private ArchiveFilter[] filters; /** * The destination directory for the shaded artifact. */ @Parameter( defaultValue = "${project.build.directory}" ) private File outputDirectory; /** * The name of the shaded artifactId. * <p/> * If you like to change the name of the native artifact, you may use the &lt;build>&lt;finalName> setting. If this * is set to something different than &lt;build>&lt;finalName>, no file replacement will be performed, even if * shadedArtifactAttached is being used. */ @Parameter private String finalName; /** * The name of the shaded artifactId. So you may want to use a different artifactId and keep the standard version. * If the original artifactId was "foo" then the final artifact would be something like foo-1.0.jar. So if you * change the artifactId you might have something like foo-special-1.0.jar. */ @Parameter( defaultValue = "${project.artifactId}" ) private String shadedArtifactId; /** * If specified, this will include only artifacts which have groupIds which start with this. */ @Parameter private String shadedGroupFilter; /** * Defines whether the shaded artifact should be attached as classifier to the original artifact. If false, the * shaded jar will be the main artifact of the project */ @Parameter private boolean shadedArtifactAttached; /** * Flag whether to generate a simplified POM for the shaded artifact. 
If set to <code>true</code>, dependencies that * have been included into the uber JAR will be removed from the <code>&lt;dependencies&gt;</code> section of the * generated POM. The reduced POM will be named <code>dependency-reduced-pom.xml</code> and is stored into the same * directory as the shaded artifact. Unless you also specify dependencyReducedPomLocation, the plugin will create a * temporary file named <code>dependency-reduced-pom.xml</code> in the project basedir. */ @Parameter( defaultValue = "true" ) private boolean createDependencyReducedPom; /** * Where to put the dependency reduced pom. Note: setting a value for this parameter with a directory other than * ${basedir} will change the value of ${basedir} for all executions that come after the shade execution. This is * often not what you want. This is considered an open issue with this plugin. * * @since 1.7 */ @Parameter( defaultValue = "${basedir}/dependency-reduced-pom.xml" ) private File dependencyReducedPomLocation; /** * Create a dependency-reduced POM in ${basedir}/drp-UNIQUE.pom. This avoids build collisions of parallel builds * without moving the dependency-reduced POM to a different directory. The property * maven.shade.dependency-reduced-pom is set to the generated filename. * * @since 1.7.2 */ @Parameter( defaultValue = "false" ) private boolean generateUniqueDependencyReducedPom; /** * When true, dependencies are kept in the pom but with scope 'provided'; when false, the dependency is removed. */ @Parameter private boolean keepDependenciesWithProvidedScope; /** * When true, transitive deps of removed dependencies are promoted to direct dependencies. This should allow the * drop in replacement of the removed deps with the new shaded jar and everything should still work. */ @Parameter private boolean promoteTransitiveDependencies; /** * The name of the classifier used in case the shaded artifact is attached. 
*/ @Parameter( defaultValue = "shaded" ) private String shadedClassifierName; /** * When true, it will attempt to create a sources jar as well */ @Parameter private boolean createSourcesJar; /** * When true, it will attempt to shade the contents of the java source files when creating the sources jar. When * false, it will just relocate the java source files to the shaded paths, but will not modify the actual contents * of the java source files. */ @Parameter( property = "shadeSourcesContent", defaultValue = "false" ) private boolean shadeSourcesContent; /** * When true, dependencies will be stripped down on the class level to only the transitive hull required for the * artifact. <em>Note:</em> Usage of this feature requires Java 1.5 or higher. * * @since 1.4 */ @Parameter private boolean minimizeJar; /** * The path to the output file for the shaded artifact. When this parameter is set, the created archive will neither * replace the project's main artifact nor will it be attached. Hence, this parameter causes the parameters * {@link #finalName}, {@link #shadedArtifactAttached}, {@link #shadedClassifierName} and * {@link #createDependencyReducedPom} to be ignored when used. * * @since 1.3 */ @Parameter private File outputFile; /** * You can pass here the roleHint about your own Shader implementation plexus component. * * @since 1.6 */ @Parameter private String shaderHint; /** * When true, the version of each dependency of the reduced pom will be based on the baseVersion of the original * dependency instead of its resolved version. 
For example, if the original pom (transitively) depends on * a:a:2.7-SNAPSHOT, if useBaseVersion is set to false, the reduced pom will depend on a:a:2.7-20130312.222222-12 * whereas if useBaseVersion is set to true, the reduced pom will depend on a:a:2.7-SNAPSHOT * * @since 3.0 */ @Parameter( defaultValue = "false" ) private boolean useBaseVersion; @Parameter( defaultValue = "false" ) private boolean shadeTestJar; /** * @since 1.6 */ private PlexusContainer plexusContainer; public void contextualize( Context context ) throws ContextException { plexusContainer = (PlexusContainer) context.get( PlexusConstants.PLEXUS_KEY ); } /** * @throws MojoExecutionException */ public void execute() throws MojoExecutionException { setupHintedShader(); Set<File> artifacts = new LinkedHashSet<File>(); Set<String> artifactIds = new LinkedHashSet<String>(); Set<File> sourceArtifacts = new LinkedHashSet<File>(); Set<File> testArtifacts = new LinkedHashSet<File>(); ArtifactSelector artifactSelector = new ArtifactSelector( project.getArtifact(), artifactSet, shadedGroupFilter ); if ( artifactSelector.isSelected( project.getArtifact() ) && !"pom".equals( project.getArtifact().getType() ) ) { if ( invalidMainArtifact() ) { createErrorOutput(); throw new MojoExecutionException( "Failed to create shaded artifact, " + "project main artifact does not exist." ); } artifacts.add( project.getArtifact().getFile() ); if ( createSourcesJar ) { File file = shadedSourcesArtifactFile(); if ( file.isFile() ) { sourceArtifacts.add( file ); } } if ( shadeTestJar ) { File file = shadedTestArtifactFile(); if ( file.isFile() ) { testArtifacts.add( file ); } } } processArtifactSelectors( artifacts, artifactIds, sourceArtifacts, artifactSelector ); File outputJar = ( outputFile != null ) ? 
outputFile : shadedArtifactFileWithClassifier(); File sourcesJar = shadedSourceArtifactFileWithClassifier(); File testJar = shadedTestArtifactFileWithClassifier(); // Now add our extra resources try { List<Filter> filters = getFilters(); List<Relocator> relocators = getRelocators(); List<ResourceTransformer> resourceTransformers = getResourceTransformers(); ShadeRequest shadeRequest = shadeRequest( artifacts, outputJar, filters, relocators, resourceTransformers ); shader.shade( shadeRequest ); if ( createSourcesJar ) { ShadeRequest shadeSourcesRequest = createShadeSourcesRequest( sourceArtifacts, sourcesJar, filters, relocators, resourceTransformers ); shader.shade( shadeSourcesRequest ); } if ( shadeTestJar ) { ShadeRequest shadeSourcesRequest = createShadeSourcesRequest( testArtifacts, testJar, filters, relocators, resourceTransformers ); shader.shade( shadeSourcesRequest ); } if ( outputFile == null ) { boolean renamed = false; // rename the output file if a specific finalName is set // but don't rename if the finalName is the <build><finalName> // because this will be handled implicitly later if ( finalName != null && finalName.length() > 0 // && !finalName.equals( project.getBuild().getFinalName() ) ) { String finalFileName = finalName + "." + project.getArtifact().getArtifactHandler().getExtension(); File finalFile = new File( outputDirectory, finalFileName ); replaceFile( finalFile, outputJar ); outputJar = finalFile; // Also support the sources JAR if ( createSourcesJar ) { finalFileName = finalName + "-sources.jar"; finalFile = new File( outputDirectory, finalFileName ); replaceFile( finalFile, sourcesJar ); sourcesJar = finalFile; } // Also support the test JAR if ( shadeTestJar ) { finalFileName = finalName + "-tests.jar"; finalFile = new File( outputDirectory, finalFileName ); replaceFile( finalFile, testJar ); testJar = finalFile; } renamed = true; } if ( shadedArtifactAttached ) { getLog().info( "Attaching shaded artifact." 
); projectHelper.attachArtifact( project, project.getArtifact().getType(), shadedClassifierName, outputJar ); if ( createSourcesJar ) { projectHelper.attachArtifact( project, "java-source", shadedClassifierName + "-sources", sourcesJar ); } } else if ( !renamed ) { getLog().info( "Replacing original artifact with shaded artifact." ); File originalArtifact = project.getArtifact().getFile(); if ( originalArtifact != null ) { replaceFile( originalArtifact, outputJar ); if ( createSourcesJar ) { getLog().info( "Replacing original source artifact with shaded source artifact." ); File shadedSources = shadedSourcesArtifactFile(); replaceFile( shadedSources, sourcesJar ); projectHelper.attachArtifact( project, "java-source", "sources", shadedSources ); } if ( shadeTestJar ) { getLog().info( "Replacing original test artifact with shaded test artifact." ); File shadedTests = shadedTestArtifactFile(); replaceFile( shadedTests, testJar ); projectHelper.attachArtifact( project, "jar", "tests", shadedTests ); } if ( createDependencyReducedPom ) { createDependencyReducedPom( artifactIds ); } } } } } catch ( Exception e ) { throw new MojoExecutionException( "Error creating shaded jar: " + e.getMessage(), e ); } } private void createErrorOutput() { getLog().error( "The project main artifact does not exist. This could have the following" ); getLog().error( "reasons:" ); getLog().error( "- You have invoked the goal directly from the command line. This is not" ); getLog().error( " supported. Please add the goal to the default lifecycle via an" ); getLog().error( " <execution> element in your POM and use \"mvn package\" to have it run." ); getLog().error( "- You have bound the goal to a lifecycle phase before \"package\". Please" ); getLog().error( " remove this binding from your POM such that the goal will be run in" ); getLog().error( " the proper phase." ); getLog().error( "- You removed the configuration of the maven-jar-plugin that produces the main artifact." 
); } private ShadeRequest shadeRequest( Set<File> artifacts, File outputJar, List<Filter> filters, List<Relocator> relocators, List<ResourceTransformer> resourceTransformers ) { ShadeRequest shadeRequest = new ShadeRequest(); shadeRequest.setJars( artifacts ); shadeRequest.setUberJar( outputJar ); shadeRequest.setFilters( filters ); shadeRequest.setRelocators( relocators ); shadeRequest.setResourceTransformers( resourceTransformers ); return shadeRequest; } private ShadeRequest createShadeSourcesRequest( Set<File> testArtifacts, File testJar, List<Filter> filters, List<Relocator> relocators, List<ResourceTransformer> resourceTransformers ) { ShadeRequest shadeSourcesRequest = shadeRequest( testArtifacts, testJar, filters, relocators, resourceTransformers ); shadeSourcesRequest.setShadeSourcesContent( shadeSourcesContent ); return shadeSourcesRequest; } private void setupHintedShader() throws MojoExecutionException { if ( shaderHint != null ) { try { shader = (Shader) plexusContainer.lookup( Shader.ROLE, shaderHint ); } catch ( ComponentLookupException e ) { throw new MojoExecutionException( "unable to lookup own Shader implementation with hint:'" + shaderHint + "'", e ); } } } private void processArtifactSelectors( Set<File> artifacts, Set<String> artifactIds, Set<File> sourceArtifacts, ArtifactSelector artifactSelector ) { for ( Artifact artifact : project.getArtifacts() ) { if ( !artifactSelector.isSelected( artifact ) ) { getLog().info( "Excluding " + artifact.getId() + " from the shaded jar." ); continue; } if ( "pom".equals( artifact.getType() ) ) { getLog().info( "Skipping pom dependency " + artifact.getId() + " in the shaded jar." ); continue; } getLog().info( "Including " + artifact.getId() + " in the shaded jar." 
); artifacts.add( artifact.getFile() ); artifactIds.add( getId( artifact ) ); if ( createSourcesJar ) { File file = resolveArtifactSources( artifact ); if ( file != null ) { sourceArtifacts.add( file ); } } } } private boolean invalidMainArtifact() { return project.getArtifact().getFile() == null || !project.getArtifact().getFile().isFile(); } private void replaceFile( File oldFile, File newFile ) throws MojoExecutionException { getLog().info( "Replacing " + oldFile + " with " + newFile ); File origFile = new File( outputDirectory, "original-" + oldFile.getName() ); if ( oldFile.exists() && !oldFile.renameTo( origFile ) ) { // try a gc to see if an unclosed stream needs garbage collecting System.gc(); System.gc(); if ( !oldFile.renameTo( origFile ) ) { // Still didn't work. We'll do a copy try { copyFiles( oldFile, origFile ); } catch ( IOException ex ) { // kind of ignorable here. We're just trying to save the original getLog().warn( ex ); } } } if ( !newFile.renameTo( oldFile ) ) { // try a gc to see if an unclosed stream needs garbage collecting System.gc(); System.gc(); if ( !newFile.renameTo( oldFile ) ) { // Still didn't work. 
We'll do a copy try { copyFiles( newFile, oldFile ); } catch ( IOException ex ) { throw new MojoExecutionException( "Could not replace original artifact with shaded artifact!", ex ); } } } } private void copyFiles( File source, File target ) throws IOException { InputStream in = null; OutputStream out = null; try { in = new FileInputStream( source ); out = new FileOutputStream( target ); IOUtil.copy( in, out ); out.close(); out = null; in.close(); in = null; } finally { IOUtil.close( in ); IOUtil.close( out ); } } private File resolveArtifactSources( Artifact artifact ) { DefaultArtifactCoordinate coordinate = new DefaultArtifactCoordinate(); coordinate.setGroupId( artifact.getGroupId() ); coordinate.setArtifactId( artifact.getArtifactId() ); coordinate.setVersion( artifact.getVersion() ); coordinate.setExtension( "jar" ); coordinate.setClassifier( "sources" ); Artifact resolvedArtifact; try { resolvedArtifact = artifactResolver.resolveArtifact( session.getProjectBuildingRequest(), coordinate ).getArtifact(); } catch ( ArtifactResolverException e ) { getLog().warn( "Could not get sources for " + artifact ); return null; } if ( resolvedArtifact.isResolved() ) { return resolvedArtifact.getFile(); } return null; } private List<Relocator> getRelocators() { List<Relocator> relocators = new ArrayList<Relocator>(); if ( relocations == null ) { return relocators; } for ( PackageRelocation r : relocations ) { relocators.add( new SimpleRelocator( r.getPattern(), r.getShadedPattern(), r.getIncludes(), r.getExcludes(), r.isRawString() ) ); } return relocators; } private List<ResourceTransformer> getResourceTransformers() { if ( transformers == null ) { return Collections.emptyList(); } return Arrays.asList( transformers ); } private List<Filter> getFilters() throws MojoExecutionException { List<Filter> filters = new ArrayList<Filter>(); List<SimpleFilter> simpleFilters = new ArrayList<SimpleFilter>(); if ( this.filters != null && this.filters.length > 0 ) { Map<Artifact, 
ArtifactId> artifacts = new HashMap<Artifact, ArtifactId>(); artifacts.put( project.getArtifact(), new ArtifactId( project.getArtifact() ) ); for ( Artifact artifact : project.getArtifacts() ) { artifacts.put( artifact, new ArtifactId( artifact ) ); } for ( ArchiveFilter filter : this.filters ) { ArtifactId pattern = new ArtifactId( filter.getArtifact() ); Set<File> jars = new HashSet<File>(); for ( Map.Entry<Artifact, ArtifactId> entry : artifacts.entrySet() ) { if ( entry.getValue().matches( pattern ) ) { Artifact artifact = entry.getKey(); jars.add( artifact.getFile() ); if ( createSourcesJar ) { File file = resolveArtifactSources( artifact ); if ( file != null ) { jars.add( file ); } } } } if ( jars.isEmpty() ) { getLog().info( "No artifact matching filter " + filter.getArtifact() ); continue; } simpleFilters.add( new SimpleFilter( jars, filter.getIncludes(), filter.getExcludes() ) ); } } filters.addAll( simpleFilters ); if ( minimizeJar ) { getLog().info( "Minimizing jar " + project.getArtifact() ); try { filters.add( new MinijarFilter( project, getLog(), simpleFilters ) ); } catch ( IOException e ) { throw new MojoExecutionException( "Failed to analyze class dependencies", e ); } } return filters; } private File shadedArtifactFileWithClassifier() { Artifact artifact = project.getArtifact(); final String shadedName = shadedArtifactId + "-" + artifact.getVersion() + "-" + shadedClassifierName + "." + artifact.getArtifactHandler().getExtension(); return new File( outputDirectory, shadedName ); } private File shadedSourceArtifactFileWithClassifier() { Artifact artifact = project.getArtifact(); final String shadedName = shadedArtifactId + "-" + artifact.getVersion() + "-" + shadedClassifierName + "-sources." 
+ artifact.getArtifactHandler().getExtension();
        return new File( outputDirectory, shadedName );
    }

    /**
     * Builds the output file handle for the shaded test artifact when a shade classifier is configured:
     * {@code <shadedArtifactId>-<version>-<shadedClassifierName>-tests.<extension>}.
     */
    private File shadedTestArtifactFileWithClassifier()
    {
        Artifact artifact = project.getArtifact();
        final String shadedName = shadedArtifactId + "-" + artifact.getVersion() + "-" + shadedClassifierName
            + "-tests." + artifact.getArtifactHandler().getExtension();
        return new File( outputDirectory, shadedName );
    }

    /**
     * Builds the output file handle for the shaded sources artifact. Prefers the build's configured
     * final name when present; otherwise falls back to {@code <shadedArtifactId>-<version>-sources.<extension>}.
     */
    private File shadedSourcesArtifactFile()
    {
        Artifact artifact = project.getArtifact();
        String shadedName;
        if ( project.getBuild().getFinalName() != null )
        {
            shadedName = project.getBuild().getFinalName() + "-sources." + artifact.getArtifactHandler().getExtension();
        }
        else
        {
            shadedName = shadedArtifactId + "-" + artifact.getVersion() + "-sources."
                + artifact.getArtifactHandler().getExtension();
        }
        return new File( outputDirectory, shadedName );
    }

    /**
     * Builds the output file handle for the shaded test artifact (no classifier). Prefers the build's
     * configured final name when present; otherwise falls back to {@code <shadedArtifactId>-<version>-tests.<extension>}.
     */
    private File shadedTestArtifactFile()
    {
        Artifact artifact = project.getArtifact();
        String shadedName;
        if ( project.getBuild().getFinalName() != null )
        {
            shadedName = project.getBuild().getFinalName() + "-tests." + artifact.getArtifactHandler().getExtension();
        }
        else
        {
            shadedName = shadedArtifactId + "-" + artifact.getVersion() + "-tests."
                + artifact.getArtifactHandler().getExtension();
        }
        return new File( outputDirectory, shadedName );
    }

    // We need to find the direct dependencies that have been included in the uber JAR so that we can modify the
    // POM accordingly.
    /**
     * Builds the dependency-reduced POM model: dependencies that were shaded into the uber JAR
     * (listed in {@code artifactsToRemove}) are dropped (or demoted to {@code provided} scope),
     * and optionally the project's transitive dependencies are promoted to direct ones.
     *
     * @param artifactsToRemove ids (groupId:artifactId:type:classifier) of artifacts shaded into the JAR
     */
    private void createDependencyReducedPom( Set<String> artifactsToRemove )
        throws IOException, DependencyGraphBuilderException, ProjectBuildingException
    {
        List<Dependency> dependencies = new ArrayList<Dependency>();
        boolean modified = false;
        List<Dependency> transitiveDeps = new ArrayList<Dependency>();
        // NOTE: By using the getArtifacts() we get the completely evaluated artifacts
        // including the system scoped artifacts with expanded values of properties used.
        for ( Artifact artifact : project.getArtifacts() )
        {
            if ( "pom".equals( artifact.getType() ) )
            {
                // don't include pom type dependencies in dependency reduced pom
                continue;
            }
            // promote
            Dependency dep = createDependency( artifact );
            // we'll figure out the exclusions in a bit.
            transitiveDeps.add( dep );
        }
        List<Dependency> origDeps = project.getDependencies();
        if ( promoteTransitiveDependencies )
        {
            origDeps = transitiveDeps;
        }
        Model model = project.getOriginalModel();
        // MSHADE-185: We will remove all system scoped dependencies which usually
        // have some kind of property usage. At this time the properties within
        // such things are already evaluated.
        List<Dependency> originalDependencies = model.getDependencies();
        removeSystemScopedDependencies( artifactsToRemove, originalDependencies );
        for ( Dependency d : origDeps )
        {
            dependencies.add( d );
            String id = getId( d );
            if ( artifactsToRemove.contains( id ) )
            {
                modified = true;
                if ( keepDependenciesWithProvidedScope )
                {
                    // keep the dependency but demote it so downstream consumers don't inherit it
                    d.setScope( "provided" );
                }
                else
                {
                    dependencies.remove( d );
                }
            }
        }
        // MSHADE-155
        model.setArtifactId( shadedArtifactId );
        // MSHADE-185: We will add those system scoped dependencies
        // from the non interpolated original pom file. So we keep
        // things like this: <systemPath>${tools.jar}</systemPath> intact.
        addSystemScopedDependencyFromNonInterpolatedPom( dependencies, originalDependencies );
        // Check to see if we have a reduction and if so rewrite the POM.
        rewriteDependencyReducedPomIfWeHaveReduction( dependencies, modified, transitiveDeps, model );
    }

    /**
     * Writes the dependency-reduced POM to disk and re-reads it, repeating until
     * {@link #updateExcludesInDeps} reports no further exclusion changes; finally points the
     * in-memory project at the reduced POM file. No-op when nothing was reduced.
     *
     * @param dependencies   the reduced direct-dependency list to write into the model
     * @param modified       whether any reduction happened; controls whether the POM is rewritten at all
     * @param transitiveDeps the fully evaluated transitive dependencies, used to compute exclusions
     * @param model          the (original, non-interpolated) model being rewritten
     */
    private void rewriteDependencyReducedPomIfWeHaveReduction( List<Dependency> dependencies, boolean modified,
                                                               List<Dependency> transitiveDeps, Model model )
        throws IOException, ProjectBuildingException, DependencyGraphBuilderException
    {
        if ( modified )
        {
            for ( int loopCounter = 0; modified; loopCounter++ )
            {
                model.setDependencies( dependencies );
                if ( generateUniqueDependencyReducedPom )
                {
                    dependencyReducedPomLocation =
                        File.createTempFile( "dependency-reduced-pom-", ".xml", project.getBasedir() );
                    project.getProperties().setProperty( "maven.shade.dependency-reduced-pom",
                                                         dependencyReducedPomLocation.getAbsolutePath() );
                }
                else
                {
                    if ( dependencyReducedPomLocation == null )
                    {
                        // MSHADE-123: We can't default to 'target' because it messes up uses of ${project.basedir}
                        dependencyReducedPomLocation = new File( project.getBasedir(), "dependency-reduced-pom.xml" );
                    }
                }
                File f = dependencyReducedPomLocation;
                // MSHADE-225
                // Works for now, maybe there's a better algorithm where no for-loop is required
                if ( loopCounter == 0 )
                {
                    // only log on the first pass so the message isn't repeated per iteration
                    getLog().info( "Dependency-reduced POM written at: " + f.getAbsolutePath() );
                }
                if ( f.exists() )
                {
                    // noinspection ResultOfMethodCallIgnored
                    f.delete();
                }
                Writer w = WriterFactory.newXmlWriter( f );
                String replaceRelativePath = null;
                if ( model.getParent() != null )
                {
                    // remember the original relative path so it can be restored after writing
                    replaceRelativePath = model.getParent().getRelativePath();
                }
                if ( model.getParent() != null )
                {
                    // rewrite the parent's relativePath so it still resolves from the
                    // (possibly relocated) dependency-reduced POM's location
                    File parentFile =
                        new File( project.getBasedir(), model.getParent().getRelativePath() ).getCanonicalFile();
                    if ( !parentFile.isFile() )
                    {
                        parentFile = new File( parentFile, "pom.xml" );
                    }
                    parentFile = parentFile.getCanonicalFile();
                    String relPath = RelativizePath.convertToRelativePath( parentFile, f );
                    model.getParent().setRelativePath( relPath );
                }
                try
                {
                    PomWriter.write( w, model, true );
                }
                finally
                {
                    // restore the in-memory model's original parent path regardless of write outcome
                    if ( model.getParent() != null )
                    {
                        model.getParent().setRelativePath( replaceRelativePath );
                    }
                    w.close();
                }
                // re-build the project from the freshly written POM and recompute exclusions;
                // the loop continues while updateExcludesInDeps keeps changing the model
                ProjectBuildingRequest projectBuildingRequest =
                    new DefaultProjectBuildingRequest( session.getProjectBuildingRequest() );
                projectBuildingRequest.setLocalRepository( localRepository );
                projectBuildingRequest.setRemoteRepositories( remoteArtifactRepositories );
                ProjectBuildingResult result = projectBuilder.build( f, projectBuildingRequest );
                getLog().debug( "updateExcludesInDeps()" );
                modified = updateExcludesInDeps( result.getProject(), dependencies, transitiveDeps );
            }
            project.setFile( dependencyReducedPomLocation );
        }
    }

    /**
     * Records the ids of all system-scoped dependencies into {@code artifactsToRemove} so they are
     * stripped from the reduced POM (their interpolated systemPath values must not leak into it).
     */
    private void removeSystemScopedDependencies( Set<String> artifactsToRemove, List<Dependency> originalDependencies )
    {
        for ( Dependency dependency : originalDependencies )
        {
            if ( dependency.getScope() != null && dependency.getScope().equalsIgnoreCase( "system" ) )
            {
                artifactsToRemove.add( getId( dependency ) );
            }
        }
    }

    /**
     * Re-adds the system-scoped dependencies taken from the non-interpolated original POM, so
     * property references such as {@code <systemPath>${tools.jar}</systemPath>} stay intact.
     */
    private void addSystemScopedDependencyFromNonInterpolatedPom( List<Dependency> dependencies,
                                                                  List<Dependency> originalDependencies )
    {
        for ( Dependency dependency : originalDependencies )
        {
            if ( dependency.getScope() != null && dependency.getScope().equalsIgnoreCase( "system" ) )
            {
                dependencies.add( dependency );
            }
        }
    }

    /**
     * Converts a resolved {@link Artifact} into a model {@link Dependency}, copying coordinates,
     * classifier (when present), optionality, scope and type. Uses the base version instead of the
     * resolved version when {@code useBaseVersion} is set.
     */
    private Dependency createDependency( Artifact artifact )
    {
        Dependency dep = new Dependency();
        dep.setArtifactId( artifact.getArtifactId() );
        if ( artifact.hasClassifier() )
        {
            dep.setClassifier( artifact.getClassifier() );
        }
        dep.setGroupId( artifact.getGroupId() );
        dep.setOptional( artifact.isOptional() );
        dep.setScope( artifact.getScope() );
        dep.setType( artifact.getType() );
        if ( useBaseVersion )
        {
            dep.setVersion( artifact.getBaseVersion() );
        }
        else
        {
            dep.setVersion( artifact.getVersion() );
        }
        return dep;
    }

    /** Canonical id of an artifact: {@code groupId:artifactId:type:classifier}. */
    private String getId( Artifact artifact )
    {
        return getId( artifact.getGroupId(), artifact.getArtifactId(), artifact.getType(), artifact.getClassifier() );
    }

    /** Canonical id of a dependency: {@code groupId:artifactId:type:classifier}. */
    private String getId( Dependency dependency )
    {
        return getId( dependency.getGroupId(), dependency.getArtifactId(), dependency.getType(),
                      dependency.getClassifier() );
    }

    /** Joins the coordinates into the id form used for lookups; a null classifier maps to "". */
    private String getId( String groupId, String artifactId, String type, String classifier )
    {
        return groupId + ":" + artifactId + ":" + type + ":" + ( ( classifier != null ) ? classifier : "" );
    }

    /**
     * Walks the rebuilt project's dependency graph and adds an {@link Exclusion} to each direct
     * dependency for every second-level child that is not in {@code transitiveDeps}, so shaded
     * transitives cannot re-enter through the reduced POM.
     *
     * @return true if any exclusion was added (i.e. the model changed and another pass is needed)
     */
    public boolean updateExcludesInDeps( MavenProject project, List<Dependency> dependencies,
                                         List<Dependency> transitiveDeps )
        throws DependencyGraphBuilderException
    {
        DependencyNode node = dependencyGraphBuilder.buildDependencyGraph( project, null );
        boolean modified = false;
        for ( DependencyNode n2 : node.getChildren() )
        {
            for ( DependencyNode n3 : n2.getChildren() )
            {
                // check if it really isn't in the list of original dependencies. Maven
                // prior to 2.0.8 may grab versions from transients instead of
                // from the direct deps in which case they would be marked included
                // instead of OMITTED_FOR_DUPLICATE
                // also, if not promoting the transitives, level 2's would be included
                boolean found = false;
                for ( Dependency dep : transitiveDeps )
                {
                    if ( dep.getArtifactId().equals( n3.getArtifact().getArtifactId() )
                        && dep.getGroupId().equals( n3.getArtifact().getGroupId() )
                        && ( dep.getType() == null || dep.getType().equals( n3.getArtifact().getType() ) ) )
                    {
                        found = true;
                        break;
                    }
                }
                if ( !found )
                {
                    // exclude the unexpected second-level node from its direct parent
                    for ( Dependency dep : dependencies )
                    {
                        if ( dep.getArtifactId().equals( n2.getArtifact().getArtifactId() )
                            && dep.getGroupId().equals( n2.getArtifact().getGroupId() )
                            && ( dep.getType() == null || dep.getType().equals( n2.getArtifact().getType() ) ) )
                        {
                            Exclusion exclusion = new Exclusion();
                            exclusion.setArtifactId( n3.getArtifact().getArtifactId() );
                            exclusion.setGroupId( n3.getArtifact().getGroupId() );
                            dep.addExclusion( exclusion );
                            modified = true;
                            break;
                        }
                    }
                }
            }
        }
        return modified;
    }
}
apache-2.0
shuaijie506/jwfm
jwfm/src/main/java/com/dx/jwfm/framework/core/parser/IMacroValueGenerator.java
131
package com.dx.jwfm.framework.core.parser;

/**
 * Strategy interface for supplying the value bound to a named macro
 * during macro expansion.
 */
public interface IMacroValueGenerator {

	/**
	 * Looks up the value associated with the given macro name.
	 *
	 * @param name the macro name to resolve
	 * @return the value bound to {@code name}
	 */
	Object getValue(String name);
}
apache-2.0
everttigchelaar/camel-svn
camel-core/src/main/java/org/apache/camel/impl/scan/AssignableToPackageScanFilter.java
2219
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.impl.scan;

import java.util.HashSet;
import java.util.Set;

import org.apache.camel.spi.PackageScanFilter;

/**
 * Package scan filter for testing if a given class is assignable to another class.
 */
public class AssignableToPackageScanFilter implements PackageScanFilter {

    // parent types to match against; a candidate matches if it is assignable to any of them
    private final Set<Class<?>> parents = new HashSet<Class<?>>();

    /** Creates a filter with no parent types; such a filter matches nothing until types are added. */
    public AssignableToPackageScanFilter() {
    }

    /**
     * Creates a filter matching classes assignable to the single given parent type.
     *
     * @param parentType the parent type to match against
     */
    public AssignableToPackageScanFilter(Class<?> parentType) {
        parents.add(parentType);
    }

    /**
     * Creates a filter matching classes assignable to any of the given parent types.
     *
     * @param parents the parent types to match against
     */
    public AssignableToPackageScanFilter(Set<Class<?>> parents) {
        this.parents.addAll(parents);
    }

    /**
     * Adds another parent type to match against.
     *
     * @param parentType the parent type to add
     */
    public void addParentType(Class<?> parentType) {
        parents.add(parentType);
    }

    /**
     * Returns true if the given type is assignable to any of the configured parent types.
     * <p>
     * Note: {@code parents} is a final field initialized at declaration, so the original
     * {@code parents != null} guard was dead code; iterating an empty set simply yields no match.
     *
     * @param type the candidate class
     * @return true if assignable to at least one parent type
     */
    public boolean matches(Class<?> type) {
        for (Class<?> parent : parents) {
            if (parent.isAssignableFrom(type)) {
                return true;
            }
        }
        return false;
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        for (Class<?> parent : parents) {
            sb.append(parent.getSimpleName()).append(", ");
        }
        // strip the trailing ", " separator if anything was appended
        sb.setLength(sb.length() > 0 ? sb.length() - 2 : 0);
        return "is assignable to any of " + sb;
    }
}
apache-2.0
sahan/Packrat
packrat/src/main/java/com/lonepulse/packrat/SQLitePersistenceUnit.java
2088
package com.lonepulse.packrat;

/*
 * #%L
 * Packrat
 * %%
 * Copyright (C) 2013 Lonepulse
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import java.util.Set;

import android.content.Context;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteDatabase.CursorFactory;
import android.database.sqlite.SQLiteOpenHelper;

import com.lonepulse.packrat.config.PropertyReader;
import com.lonepulse.packrat.config.PropertyReader.PROPERTY;

/**
 * <p>This is a concrete implementation of {@link PersistenceUnit} which
 * realizes the domain as an <b>SQLite</b> database. The database name and
 * version are read from configuration via {@link PropertyReader}.
 *
 * @version 1.1.0
 * <br><br>
 * @author <a href="mailto:sahan@lonepulse.com">Lahiru Sahan Jayasinghe</a>
 */
public abstract class SQLitePersistenceUnit extends SQLiteOpenHelper implements PersistenceUnit {

    //TODO support database downgrades?

    /**
     * <p>See {@link SQLiteOpenHelper#SQLiteOpenHelper(Context, String, CursorFactory, int)}.
     *
     * <p>The database name comes from the {@code NAME} property and the schema
     * version from the {@code VERSION} property; no custom {@link CursorFactory}
     * is supplied (the {@code null} argument).
     *
     * @param context the Android context used to resolve configuration and open the database
     */
    public SQLitePersistenceUnit(Context context) {

        super(context, PropertyReader.read(context, PROPERTY.NAME), null,
              Integer.parseInt(PropertyReader.read(context, PROPERTY.VERSION)));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void onCreate(SQLiteDatabase sqLiteDatabase) {

        // NOTE(review): entitySet is currently unused — schema creation from
        // entity metadata is not implemented yet (see TODO below)
        Set<Class<Object>> entitySet = entities();

        //TODO read metadata and create schema
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void onUpgrade(SQLiteDatabase sqLiteDatabase, int oldVersion, int newVersion) {

        // NOTE(review): entitySet is currently unused — schema upgrade from
        // entity metadata is not implemented yet (see TODO below)
        Set<Class<Object>> entitySet = entities();

        //TODO read metadata and upgrade schema
    }
}
apache-2.0