gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.processors.kafka.pubsub;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.regex.Pattern;
import javax.xml.bind.DatatypeConverter;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import org.apache.nifi.annotation.behavior.DynamicProperty;
import org.apache.nifi.annotation.behavior.InputRequirement;
import org.apache.nifi.annotation.behavior.WritesAttribute;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.annotation.lifecycle.OnStopped;
import org.apache.nifi.components.AllowableValue;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.components.ValidationContext;
import org.apache.nifi.components.ValidationResult;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.DataUnit;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.io.InputStreamCallback;
import org.apache.nifi.processor.util.FlowFileFilters;
import org.apache.nifi.processor.util.StandardValidators;
@Tags({"Apache", "Kafka", "Put", "Send", "Message", "PubSub", "1.0"})
@CapabilityDescription("Sends the contents of a FlowFile as a message to Apache Kafka using the Kafka 1.0 Producer API."
+ "The messages to send may be individual FlowFiles or may be delimited, using a "
+ "user-specified delimiter, such as a new-line. "
+ "The complementary NiFi processor for fetching messages is ConsumeKafka_1_0.")
@InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED)
@DynamicProperty(name = "The name of a Kafka configuration property.", value = "The value of a given Kafka configuration property.",
description = "These properties will be added on the Kafka configuration after loading any provided configuration properties."
+ " In the event a dynamic property represents a property that was already set, its value will be ignored and WARN message logged."
+ " For the list of available Kafka properties please refer to: http://kafka.apache.org/documentation.html#configuration. ",
expressionLanguageScope = ExpressionLanguageScope.VARIABLE_REGISTRY)
@WritesAttribute(attribute = "msg.count", description = "The number of messages that were sent to Kafka for this FlowFile. This attribute is added only to "
+ "FlowFiles that are routed to success. If the <Message Demarcator> Property is not set, this will always be 1, but if the Property is set, it may "
+ "be greater than 1.")
public class PublishKafka_1_0 extends AbstractProcessor {

    /** Name of the attribute added to successful FlowFiles, holding the number of Kafka messages sent. */
    protected static final String MSG_COUNT = "msg.count";

    // Allowable values for Kafka's 'acks' setting, from strongest to weakest guarantee.
    static final AllowableValue DELIVERY_REPLICATED = new AllowableValue("all", "Guarantee Replicated Delivery",
        "FlowFile will be routed to failure unless the message is replicated to the appropriate "
            + "number of Kafka Nodes according to the Topic configuration");
    static final AllowableValue DELIVERY_ONE_NODE = new AllowableValue("1", "Guarantee Single Node Delivery",
        "FlowFile will be routed to success if the message is received by a single Kafka node, "
            + "whether or not it is replicated. This is faster than <Guarantee Replicated Delivery> "
            + "but can result in data loss if a Kafka node crashes");
    static final AllowableValue DELIVERY_BEST_EFFORT = new AllowableValue("0", "Best Effort",
        "FlowFile will be routed to success after successfully writing the content to a Kafka node, "
            + "without waiting for a response. This provides the best performance but may result in data loss.");

    // Allowable values for Kafka's 'partitioner.class' setting.
    static final AllowableValue ROUND_ROBIN_PARTITIONING = new AllowableValue(Partitioners.RoundRobinPartitioner.class.getName(),
        Partitioners.RoundRobinPartitioner.class.getSimpleName(),
        "Messages will be assigned partitions in a round-robin fashion, sending the first message to Partition 1, "
            + "the next message to Partition 2, and so on, wrapping as necessary.");
    static final AllowableValue RANDOM_PARTITIONING = new AllowableValue("org.apache.kafka.clients.producer.internals.DefaultPartitioner",
        "DefaultPartitioner", "Messages will be assigned to random partitions.");

    // Allowable values describing how the 'kafka.key' attribute value is interpreted.
    static final AllowableValue UTF8_ENCODING = new AllowableValue("utf-8", "UTF-8 Encoded", "The key is interpreted as a UTF-8 Encoded string.");
    static final AllowableValue HEX_ENCODING = new AllowableValue("hex", "Hex Encoded",
        "The key is interpreted as arbitrary binary data that is encoded using hexadecimal characters with uppercase letters.");

    static final PropertyDescriptor TOPIC = new PropertyDescriptor.Builder()
        .name("topic")
        .displayName("Topic Name")
        .description("The name of the Kafka Topic to publish to.")
        .required(true)
        .addValidator(StandardValidators.NON_BLANK_VALIDATOR)
        .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
        .build();

    static final PropertyDescriptor DELIVERY_GUARANTEE = new PropertyDescriptor.Builder()
        .name(ProducerConfig.ACKS_CONFIG)
        .displayName("Delivery Guarantee")
        .description("Specifies the requirement for guaranteeing that a message is sent to Kafka. Corresponds to Kafka's 'acks' property.")
        .required(true)
        .expressionLanguageSupported(ExpressionLanguageScope.NONE)
        .allowableValues(DELIVERY_BEST_EFFORT, DELIVERY_ONE_NODE, DELIVERY_REPLICATED)
        .defaultValue(DELIVERY_BEST_EFFORT.getValue())
        .build();

    static final PropertyDescriptor METADATA_WAIT_TIME = new PropertyDescriptor.Builder()
        .name(ProducerConfig.MAX_BLOCK_MS_CONFIG)
        .displayName("Max Metadata Wait Time")
        .description("The amount of time publisher will wait to obtain metadata or wait for the buffer to flush during the 'send' call before failing the "
            + "entire 'send' call. Corresponds to Kafka's 'max.block.ms' property")
        .required(true)
        .addValidator(StandardValidators.TIME_PERIOD_VALIDATOR)
        .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
        .defaultValue("5 sec")
        .build();

    static final PropertyDescriptor ACK_WAIT_TIME = new PropertyDescriptor.Builder()
        .name("ack.wait.time")
        .displayName("Acknowledgment Wait Time")
        .description("After sending a message to Kafka, this indicates the amount of time that we are willing to wait for a response from Kafka. "
            + "If Kafka does not acknowledge the message within this time period, the FlowFile will be routed to 'failure'.")
        .addValidator(StandardValidators.TIME_PERIOD_VALIDATOR)
        .expressionLanguageSupported(ExpressionLanguageScope.NONE)
        .required(true)
        .defaultValue("5 secs")
        .build();

    static final PropertyDescriptor MAX_REQUEST_SIZE = new PropertyDescriptor.Builder()
        .name("max.request.size")
        .displayName("Max Request Size")
        .description("The maximum size of a request in bytes. Corresponds to Kafka's 'max.request.size' property and defaults to 1 MB (1048576).")
        .required(true)
        .addValidator(StandardValidators.DATA_SIZE_VALIDATOR)
        .defaultValue("1 MB")
        .build();

    static final PropertyDescriptor KEY = new PropertyDescriptor.Builder()
        .name("kafka-key")
        .displayName("Kafka Key")
        .description("The Key to use for the Message. "
            + "If not specified, the flow file attribute 'kafka.key' is used as the message key, if it is present. "
            + "Beware that setting Kafka key and demarcating at the same time may potentially lead to many Kafka messages with the same key. "
            + "Normally this is not a problem as Kafka does not enforce or assume message and key uniqueness. Still, setting the demarcator and Kafka key at the same time poses a risk of "
            + "data loss on Kafka. During a topic compaction on Kafka, messages will be deduplicated based on this key.")
        .required(false)
        .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
        .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
        .build();

    static final PropertyDescriptor KEY_ATTRIBUTE_ENCODING = new PropertyDescriptor.Builder()
        .name("key-attribute-encoding")
        .displayName("Key Attribute Encoding")
        .description("FlowFiles that are emitted have an attribute named '" + KafkaProcessorUtils.KAFKA_KEY + "'. This property dictates how the value of the attribute should be encoded.")
        .required(true)
        .defaultValue(UTF8_ENCODING.getValue())
        .allowableValues(UTF8_ENCODING, HEX_ENCODING)
        .build();

    static final PropertyDescriptor MESSAGE_DEMARCATOR = new PropertyDescriptor.Builder()
        .name("message-demarcator")
        .displayName("Message Demarcator")
        .required(false)
        .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
        .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
        .description("Specifies the string (interpreted as UTF-8) to use for demarcating multiple messages within "
            + "a single FlowFile. If not specified, the entire content of the FlowFile will be used as a single message. If specified, the "
            + "contents of the FlowFile will be split on this delimiter and each section sent as a separate Kafka message. "
            + "To enter special character such as 'new line' use CTRL+Enter or Shift+Enter, depending on your OS.")
        .build();

    static final PropertyDescriptor PARTITION_CLASS = new PropertyDescriptor.Builder()
        .name(ProducerConfig.PARTITIONER_CLASS_CONFIG)
        .displayName("Partitioner class")
        .description("Specifies which class to use to compute a partition id for a message. Corresponds to Kafka's 'partitioner.class' property.")
        .allowableValues(ROUND_ROBIN_PARTITIONING, RANDOM_PARTITIONING)
        .defaultValue(RANDOM_PARTITIONING.getValue())
        .required(false)
        .build();

    static final PropertyDescriptor COMPRESSION_CODEC = new PropertyDescriptor.Builder()
        .name(ProducerConfig.COMPRESSION_TYPE_CONFIG)
        .displayName("Compression Type")
        .description("This parameter allows you to specify the compression codec for all data generated by this producer.")
        .required(true)
        .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
        .allowableValues("none", "gzip", "snappy", "lz4")
        .defaultValue("none")
        .build();

    static final PropertyDescriptor ATTRIBUTE_NAME_REGEX = new PropertyDescriptor.Builder()
        .name("attribute-name-regex")
        .displayName("Attributes to Send as Headers (Regex)")
        .description("A Regular Expression that is matched against all FlowFile attribute names. "
            + "Any attribute whose name matches the regex will be added to the Kafka messages as a Header. "
            + "If not specified, no FlowFile attributes will be added as headers.")
        .addValidator(StandardValidators.REGULAR_EXPRESSION_VALIDATOR)
        .expressionLanguageSupported(ExpressionLanguageScope.NONE)
        .required(false)
        .build();

    static final PropertyDescriptor USE_TRANSACTIONS = new PropertyDescriptor.Builder()
        .name("use-transactions")
        .displayName("Use Transactions")
        .description("Specifies whether or not NiFi should provide Transactional guarantees when communicating with Kafka. If there is a problem sending data to Kafka, "
            + "and this property is set to false, then the messages that have already been sent to Kafka will continue on and be delivered to consumers. "
            + "If this is set to true, then the Kafka transaction will be rolled back so that those messages are not available to consumers. Setting this to true "
            + "requires that the <Delivery Guarantee> property be set to \"Guarantee Replicated Delivery.\"")
        .expressionLanguageSupported(ExpressionLanguageScope.NONE)
        .allowableValues("true", "false")
        .defaultValue("true")
        .required(true)
        .build();

    static final PropertyDescriptor MESSAGE_HEADER_ENCODING = new PropertyDescriptor.Builder()
        .name("message-header-encoding")
        .displayName("Message Header Encoding")
        .description("For any attribute that is added as a message header, as configured via the <Attributes to Send as Headers> property, "
            + "this property indicates the Character Encoding to use for serializing the headers.")
        .addValidator(StandardValidators.CHARACTER_SET_VALIDATOR)
        .defaultValue("UTF-8")
        .required(false)
        .build();

    static final Relationship REL_SUCCESS = new Relationship.Builder()
        .name("success")
        .description("FlowFiles for which all content was sent to Kafka.")
        .build();

    static final Relationship REL_FAILURE = new Relationship.Builder()
        .name("failure")
        .description("Any FlowFile that cannot be sent to Kafka will be routed to this Relationship")
        .build();

    private static final List<PropertyDescriptor> PROPERTIES;
    private static final Set<Relationship> RELATIONSHIPS;

    // Lazily created on first trigger, closed on @OnStopped; volatile so all threads see the same pool.
    private volatile PublisherPool publisherPool = null;

    static {
        final List<PropertyDescriptor> properties = new ArrayList<>();
        properties.addAll(KafkaProcessorUtils.getCommonPropertyDescriptors());
        properties.add(TOPIC);
        properties.add(DELIVERY_GUARANTEE);
        properties.add(USE_TRANSACTIONS);
        properties.add(ATTRIBUTE_NAME_REGEX);
        properties.add(MESSAGE_HEADER_ENCODING);
        properties.add(KEY);
        properties.add(KEY_ATTRIBUTE_ENCODING);
        properties.add(MESSAGE_DEMARCATOR);
        properties.add(MAX_REQUEST_SIZE);
        properties.add(ACK_WAIT_TIME);
        properties.add(METADATA_WAIT_TIME);
        properties.add(PARTITION_CLASS);
        properties.add(COMPRESSION_CODEC);
        PROPERTIES = Collections.unmodifiableList(properties);

        final Set<Relationship> relationships = new HashSet<>();
        relationships.add(REL_SUCCESS);
        relationships.add(REL_FAILURE);
        RELATIONSHIPS = Collections.unmodifiableSet(relationships);
    }

    @Override
    public Set<Relationship> getRelationships() {
        return RELATIONSHIPS;
    }

    @Override
    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
        return PROPERTIES;
    }

    @Override
    protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(final String propertyDescriptorName) {
        // Dynamic properties are passed straight through to the Kafka producer configuration.
        return new PropertyDescriptor.Builder()
            .description("Specifies the value for '" + propertyDescriptorName + "' Kafka Configuration.")
            .name(propertyDescriptorName)
            .addValidator(new KafkaProcessorUtils.KafkaConfigValidator(ProducerConfig.class))
            .dynamic(true)
            .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
            .build();
    }

    @Override
    protected Collection<ValidationResult> customValidate(final ValidationContext validationContext) {
        final List<ValidationResult> results = new ArrayList<>();
        results.addAll(KafkaProcessorUtils.validateCommonProperties(validationContext));

        // Kafka transactions require acks=all; reject any other Delivery Guarantee when transactions are enabled.
        final boolean useTransactions = validationContext.getProperty(USE_TRANSACTIONS).asBoolean();
        if (useTransactions) {
            final String deliveryGuarantee = validationContext.getProperty(DELIVERY_GUARANTEE).getValue();
            if (!DELIVERY_REPLICATED.getValue().equals(deliveryGuarantee)) {
                results.add(new ValidationResult.Builder()
                    .subject("Delivery Guarantee")
                    .valid(false)
                    .explanation("In order to use Transactions, the Delivery Guarantee must be \"Guarantee Replicated Delivery.\" "
                        + "Either change the <Use Transactions> property or the <Delivery Guarantee> property.")
                    .build());
            }
        }

        return results;
    }

    /**
     * Returns the shared publisher pool, creating it on first use. Synchronized so that
     * concurrent onTrigger threads never create more than one pool.
     */
    private synchronized PublisherPool getPublisherPool(final ProcessContext context) {
        PublisherPool pool = publisherPool;
        if (pool != null) {
            return pool;
        }
        return publisherPool = createPublisherPool(context);
    }

    /**
     * Builds a new PublisherPool from the processor configuration. Protected so that tests
     * can override pool creation.
     */
    protected PublisherPool createPublisherPool(final ProcessContext context) {
        final int maxMessageSize = context.getProperty(MAX_REQUEST_SIZE).asDataSize(DataUnit.B).intValue();
        final long maxAckWaitMillis = context.getProperty(ACK_WAIT_TIME).asTimePeriod(TimeUnit.MILLISECONDS).longValue();

        final String attributeNameRegex = context.getProperty(ATTRIBUTE_NAME_REGEX).getValue();
        final Pattern attributeNamePattern = attributeNameRegex == null ? null : Pattern.compile(attributeNameRegex);
        final boolean useTransactions = context.getProperty(USE_TRANSACTIONS).asBoolean();

        final String charsetName = context.getProperty(MESSAGE_HEADER_ENCODING).evaluateAttributeExpressions().getValue();
        final Charset charset = Charset.forName(charsetName);

        final Map<String, Object> kafkaProperties = new HashMap<>();
        KafkaProcessorUtils.buildCommonKafkaProperties(context, ProducerConfig.class, kafkaProperties);
        kafkaProperties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName());
        kafkaProperties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName());
        // Use the ProducerConfig constant for consistency with the other producer settings above.
        kafkaProperties.put(ProducerConfig.MAX_REQUEST_SIZE_CONFIG, String.valueOf(maxMessageSize));

        return new PublisherPool(kafkaProperties, getLogger(), maxMessageSize, maxAckWaitMillis, useTransactions, attributeNamePattern, charset);
    }

    /**
     * Releases all pooled Kafka producers when the processor is stopped.
     */
    @OnStopped
    public void closePool() {
        if (publisherPool != null) {
            publisherPool.close();
        }
        publisherPool = null;
    }

    @Override
    public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
        final boolean useDemarcator = context.getProperty(MESSAGE_DEMARCATOR).isSet();

        // Batch FlowFiles together for throughput, up to ~250 KB or 500 FlowFiles per trigger.
        final List<FlowFile> flowFiles = session.get(FlowFileFilters.newSizeBasedFilter(250, DataUnit.KB, 500));
        if (flowFiles.isEmpty()) {
            return;
        }

        final PublisherPool pool = getPublisherPool(context);
        if (pool == null) {
            context.yield();
            return;
        }

        final String securityProtocol = context.getProperty(KafkaProcessorUtils.SECURITY_PROTOCOL).getValue();
        final String bootstrapServers = context.getProperty(KafkaProcessorUtils.BOOTSTRAP_SERVERS).evaluateAttributeExpressions().getValue();
        final boolean useTransactions = context.getProperty(USE_TRANSACTIONS).asBoolean();

        final long startTime = System.nanoTime();
        try (final PublisherLease lease = pool.obtainPublisher()) {
            if (useTransactions) {
                lease.beginTransaction();
            }

            // Send each FlowFile to Kafka asynchronously.
            for (final FlowFile flowFile : flowFiles) {
                if (!isScheduled()) {
                    // Processor was stopped mid-batch: re-queue instead of sending. With transactions,
                    // abort everything (nothing partial may become visible to consumers).
                    if (useTransactions) {
                        session.rollback();
                        lease.rollback();
                        return;
                    }
                    session.transfer(flowFile);
                    continue;
                }

                final byte[] messageKey = getMessageKey(flowFile, context);
                final String topic = context.getProperty(TOPIC).evaluateAttributeExpressions(flowFile).getValue();
                final byte[] demarcatorBytes;
                if (useDemarcator) {
                    demarcatorBytes = context.getProperty(MESSAGE_DEMARCATOR).evaluateAttributeExpressions(flowFile).getValue().getBytes(StandardCharsets.UTF_8);
                } else {
                    demarcatorBytes = null;
                }

                session.read(flowFile, new InputStreamCallback() {
                    @Override
                    public void process(final InputStream rawIn) throws IOException {
                        try (final InputStream in = new BufferedInputStream(rawIn)) {
                            lease.publish(flowFile, in, messageKey, demarcatorBytes, topic);
                        }
                    }
                });
            }

            // Complete the send: waits for acks and commits the transaction, if one is open.
            final PublishResult publishResult = lease.complete();

            if (publishResult.isFailure()) {
                getLogger().info("Failed to send FlowFile to kafka; transferring to failure");
                session.transfer(flowFiles, REL_FAILURE);
                return;
            }

            // Transfer any successful FlowFiles.
            final long transmissionMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startTime);
            for (FlowFile success : flowFiles) {
                final String topic = context.getProperty(TOPIC).evaluateAttributeExpressions(success).getValue();

                final int msgCount = publishResult.getSuccessfulMessageCount(success);
                success = session.putAttribute(success, MSG_COUNT, String.valueOf(msgCount));
                session.adjustCounter("Messages Sent", msgCount, true);

                final String transitUri = KafkaProcessorUtils.buildTransitURI(securityProtocol, bootstrapServers, topic);
                session.getProvenanceReporter().send(success, transitUri, "Sent " + msgCount + " messages", transmissionMillis);
                session.transfer(success, REL_SUCCESS);
            }
        }
    }

    /**
     * Resolves the Kafka message key for a FlowFile: the <Kafka Key> property if set,
     * otherwise the 'kafka.key' FlowFile attribute. Returns null when neither is present.
     * The key string is decoded per the <Key Attribute Encoding> property (UTF-8 or hex).
     */
    private byte[] getMessageKey(final FlowFile flowFile, final ProcessContext context) {
        final String uninterpretedKey;
        if (context.getProperty(KEY).isSet()) {
            uninterpretedKey = context.getProperty(KEY).evaluateAttributeExpressions(flowFile).getValue();
        } else {
            uninterpretedKey = flowFile.getAttribute(KafkaProcessorUtils.KAFKA_KEY);
        }

        if (uninterpretedKey == null) {
            return null;
        }

        final String keyEncoding = context.getProperty(KEY_ATTRIBUTE_ENCODING).getValue();
        if (UTF8_ENCODING.getValue().equals(keyEncoding)) {
            return uninterpretedKey.getBytes(StandardCharsets.UTF_8);
        }

        return DatatypeConverter.parseHexBinary(uninterpretedKey);
    }
}
| |
/*L
* Copyright Washington University in St. Louis
* Copyright SemanticBits
* Copyright Persistent Systems
* Copyright Krishagni
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/catissue-tools/LICENSE.txt for details.
*/
/**
*
*/
package edu.wustl.clinportal.security;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.Vector;
import edu.wustl.common.exception.ErrorKey;
import edu.wustl.common.idp.IdPManager;
import edu.wustl.common.util.XMLPropertyHandler;
import edu.wustl.common.util.logger.Logger;
import edu.wustl.security.exception.SMException;
import edu.wustl.security.global.ProvisionManager;
import edu.wustl.security.global.Roles;
import edu.wustl.security.global.Utility;
import edu.wustl.security.locator.SecurityManagerPropertiesLocator;
import gov.nih.nci.security.UserProvisioningManager;
import gov.nih.nci.security.authorization.domainobjects.Group;
import gov.nih.nci.security.authorization.domainobjects.Role;
import gov.nih.nci.security.authorization.domainobjects.User;
import gov.nih.nci.security.dao.GroupSearchCriteria;
import gov.nih.nci.security.exceptions.CSException;
import gov.nih.nci.security.exceptions.CSObjectNotFoundException;
import gov.nih.nci.security.exceptions.CSTransactionException;
/**
 * Application-specific SecurityManager that extends the SecurityManager class
 * in the common package, defining the application-specific roles and groups.
 *
 * @author shital_lawhale
 */
public class SecurityManager extends edu.wustl.security.manager.SecurityManager
{

	// Static final logger: one instance per class; 'transient' on the old instance field was meaningless.
	private static final Logger LOGGER = Logger.getCommonLogger(SecurityManager.class);

	/** Literal compared (case-insensitively) against boolean-valued configuration properties. */
	public static final String TRUE = "true";

	/** Name of the configuration property that enables Identity Provider (IdP) integration. */
	public static final String IDP_ENABLED = "idp.enabled";

	/**
	 * Returns a Vector of all the role objects defined for the application
	 * (Administrator and Scientist), fetched from the database.
	 *
	 * @return Vector of {@code Role} objects
	 * @throws SMException if the roles cannot be fetched from CSM
	 */
	public Vector getRoles() throws SMException
	{
		Vector roles = new Vector();
		UserProvisioningManager userProvManager;
		try
		{
			userProvManager = ProvisionManager.getInstance().getUserProvisioningManager();
			roles.add(userProvManager.getRoleById(ProvisionManager.getInstance().getRoleID(
					edu.wustl.security.global.Constants.ROLE_ADMIN)));
			roles.add(userProvManager.getRoleById(ProvisionManager.getInstance().getRoleID(
					edu.wustl.security.global.Constants.SCIENTIST)));
		}
		catch (CSException e)
		{
			LOGGER.debug("Unable to get roles: Exception: " + e.getMessage());
			throw new SMException(ErrorKey.getErrorKey("error.fetch.roleslist"), e,
					"Exception while getting roles");
		}
		return roles;
	}

	/**
	 * Assigns a Role to a User. The user is first removed from the admin and
	 * public groups so that the new role replaces (rather than accumulates with)
	 * any previously assigned role.
	 *
	 * @param userID the id of the User to whom the Role will be assigned
	 * @param roleID the id of the Role which is to be assigned to the user
	 * @throws SMException if the CSM provisioning operations fail
	 */
	public void assignRoleToUser(String userID, String roleID) throws SMException
	{
		LOGGER.debug("UserName: " + userID + " Role ID:" + roleID);
		UserProvisioningManager userProvManager;
		User user;
		String groupId;
		try
		{
			userProvManager = ProvisionManager.getInstance().getUserProvisioningManager();
			user = userProvManager.getUserById(userID);

			//Remove user from any other role if he is assigned some
			ProvisionManager provisionManager = ProvisionManager.getInstance();
			userProvManager.removeUserFromGroup(provisionManager.getGroupID(ADMIN_GROUP), String
					.valueOf(user.getUserId()));
			userProvManager.removeUserFromGroup(provisionManager.getGroupID(PUBLIC_GROUP), String
					.valueOf(user.getUserId()));

			//Add user to corresponding group
			groupId = getGroupIdForRole(roleID);
			if (groupId == null)
			{
				LOGGER.debug(" User assigned no role");
			}
			else
			{
				assignAdditionalGroupsToUser(String.valueOf(user.getUserId()),
						new String[]{groupId});
				LOGGER.debug(" User assigned role:" + groupId);
			}
		}
		catch (CSException e)
		{
			LOGGER.debug("UNABLE TO ASSIGN ROLE TO USER: Exception: " + e.getMessage());
			throw new SMException(ErrorKey.getErrorKey("error.assign.user.role"), e,
					"Exception while assigning role to user");
		}
	}

	/**
	 * Maps a role id to the id of the CSM group that backs it.
	 *
	 * @param roleID the role id to map
	 * @return the corresponding group id, or null if the role maps to no group
	 * @throws SMException if the role or group ids cannot be resolved
	 */
	public String getGroupIdForRole(String roleID) throws SMException
	{
		String groupId = null;
		ProvisionManager provisionManager = ProvisionManager.getInstance();
		if (roleID.equals(provisionManager
				.getRoleID(edu.wustl.security.global.Constants.ROLE_ADMIN)))
		{
			LOGGER.debug(" role corresponds to Administrator group");
			groupId = provisionManager.getGroupID(ADMIN_GROUP);
		}
		else if (roleID.equals(provisionManager
				.getRoleID(edu.wustl.security.global.Constants.SCIENTIST)))
		{
			LOGGER.debug(" role corresponds to public group");
			groupId = provisionManager.getGroupID(PUBLIC_GROUP);
		}
		else
		{
			LOGGER.debug("role corresponds to no group");
		}
		return groupId;
	}

	/* (non-Javadoc)
	 * @see edu.wustl.common.security.SecurityManager#getUserRole(long)
	 */
	public Role getUserRole(long userID) throws SMException
	{
		Set groups;
		UserProvisioningManager userProvManager;
		Iterator iterator;
		Group group;
		Role role = null;
		try
		{
			userProvManager = ProvisionManager.getInstance().getUserProvisioningManager();
			groups = userProvManager.getGroups(String.valueOf(userID));
			iterator = groups.iterator();
			while (iterator.hasNext())
			{
				group = (Group) iterator.next();
				// Only consider groups that belong to this application's context.
				if (group.getApplication().getApplicationName().equals(
						SecurityManagerPropertiesLocator.getInstance().getApplicationCtxName()))
				{
					role = getRoleFromGroup(userProvManager, group);
					if (role != null)
					{
						break;
					}
				}
			}
		}
		catch (CSException e)
		{
			LOGGER.debug("Unable to get user role: Exception: " + e.getMessage());
			throw new SMException(ErrorKey.getErrorKey("error.fetch.user.role"), e,
					"Exception while getting user role");
		}
		return role;
	}

	/**
	 * Resolves the Role backed by the given group (admin group maps to the
	 * Administrator role, public group to the Scientist role).
	 *
	 * @param userProvManager the CSM user provisioning manager
	 * @param group the group to resolve
	 * @return the mapped Role, or null if the group maps to no known role
	 * @throws CSObjectNotFoundException if the role id does not exist in CSM
	 * @throws SMException if the role id cannot be resolved
	 */
	private Role getRoleFromGroup(UserProvisioningManager userProvManager, Group group)
			throws CSObjectNotFoundException, SMException
	{
		Role role = null;
		if (group.getGroupName().equals(ADMIN_GROUP))
		{
			role = userProvManager.getRoleById(ProvisionManager.getInstance().getRoleID(
					edu.wustl.security.global.Constants.ROLE_ADMIN));
		}
		else if (group.getGroupName().equals(PUBLIC_GROUP))
		{
			role = userProvManager.getRoleById(ProvisionManager.getInstance().getRoleID(
					edu.wustl.security.global.Constants.SCIENTIST));
		}
		return role;
	}

	/* (non-Javadoc)
	 * @see edu.wustl.common.security.SecurityManager#getUserGroup(long)
	 */
	public String getUserGroup(long userID) throws SMException
	{
		Set groups;
		UserProvisioningManager userProvManager;
		Iterator iterator;
		Group group;
		String rolename = null;
		try
		{
			userProvManager = ProvisionManager.getInstance().getUserProvisioningManager();
			groups = userProvManager.getGroups(String.valueOf(userID));
			iterator = groups.iterator();
			while (iterator.hasNext())
			{
				group = (Group) iterator.next();
				// Only consider groups that belong to this application's context.
				if (group.getApplication().getApplicationName().equals(
						SecurityManagerPropertiesLocator.getInstance().getApplicationCtxName()))
				{
					rolename = getRolename(group);
					if (rolename != null)
					{
						break;
					}
				}
			}
		}
		catch (CSException e)
		{
			LOGGER.debug("Unable to get user group: Exception: " + e.getMessage());
			throw new SMException(ErrorKey.getErrorKey("error.fetch.usergroup"), e,
					"Exception while getting user group");
		}
		return rolename;
	}

	/**
	 * Maps a group to its role name constant.
	 *
	 * @param group the group to map
	 * @return the role name, or null if the group maps to no known role
	 */
	private String getRolename(Group group)
	{
		String rolename = null;
		if (group.getGroupName().equals(ADMIN_GROUP))
		{
			rolename = Roles.ADMINISTRATOR;
		}
		else if (group.getGroupName().equals(SUPERVISOR_GROUP))
		{
			rolename = Roles.SUPERVISOR;
		}
		else if (group.getGroupName().equals(TECHNICIAN_GROUP))
		{
			rolename = Roles.TECHNICIAN;
		}
		else if (group.getGroupName().equals(PUBLIC_GROUP))
		{
			rolename = Roles.SCIENTIST;
		}
		return rolename;
	}

	/**
	 * Returns an array of the CSM user ids of all users who are administrators.
	 *
	 * @return the administrator user ids; empty if no administrator group is found
	 * @throws SMException if the group search fails
	 */
	public Long[] getAllAdministrators() throws SMException
	{
		Group group = new Group();
		group.setGroupName(ADMIN_GROUP);
		GroupSearchCriteria grpSearch = new GroupSearchCriteria(group);
		List list = ProvisionManager.getInstance().getUserProvisioningManager().getObjects(
				grpSearch);
		LOGGER.debug("Group Size: " + list.size());
		if (list.isEmpty())
		{
			// Guard against an empty search result; previously list.get(0) would
			// have thrown an IndexOutOfBoundsException here.
			return new Long[0];
		}
		group = (Group) list.get(0);
		LOGGER.debug("Group : " + group.getGroupName());
		Set users = group.getUsers();
		LOGGER.debug("Users : " + users);
		Long[] userId = new Long[users.size()];
		Iterator iterator = users.iterator(); // NOPMD by rukhsana_sameer on 10/17/08 3:49 PM
		for (int i = 0; i < users.size(); i++)
		{
			userId[i] = ((User) iterator.next()).getUserId();
		}
		return userId;
	}

	/**
	 * Checks whether the given user has the given privilege on the given object.
	 *
	 * @param userName the login name of the user
	 * @param objectId the id of the protected object
	 * @param privilegeName the privilege to check
	 * @return true if the user is authorized, false otherwise
	 * @throws SMException if the permission check fails
	 */
	public boolean isAuthorized(String userName, String objectId, String privilegeName)
			throws SMException
	{
		boolean isAuthorized = false;
		try
		{
			isAuthorized = ProvisionManager.getInstance().getAuthorizationManager()
					.checkPermission(userName, objectId, privilegeName);
		}
		catch (CSException e)
		{
			Utility.getInstance().throwSMException(e, "Exception while checking permission",
					"error.check.authorization");
		}
		return isAuthorized;
	}

	/* (non-Javadoc)
	 * @see edu.wustl.security.manager.SecurityManager#createUser(gov.nih.nci.security.authorization.domainobjects.User)
	 */
	public void createUser(User user) throws SMException
	{
		try
		{
			ProvisionManager.getInstance().getUserProvisioningManager().createUser(user);
			// When an IdP is configured, the new user must also be queued for IdP provisioning.
			if (isIdpEnabled())
			{
				IdPManager idp = IdPManager.getInstance();
				idp.addUserToQueue(SecurityManagerPropertiesLocator.getInstance()
						.getApplicationCtxName(), user);
			}
		}
		catch (CSTransactionException exception)
		{
			String mesg = "Unable to create user " + user.getEmailId();
			Utility.getInstance().throwSMException(exception, mesg, "sm.operation.error");
		}
	}

	/**
	 * Checks the application configuration to determine whether Identity
	 * Provider integration is enabled.
	 *
	 * @return true if the 'idp.enabled' property is set to "true" (case-insensitive)
	 */
	private boolean isIdpEnabled()
	{
		// Constants are static; access them directly instead of via 'this'.
		String idpEnabled = XMLPropertyHandler.getValue(IDP_ENABLED);
		return TRUE.equalsIgnoreCase(idpEnabled);
	}

	/* (non-Javadoc)
	 * @see edu.wustl.security.manager.SecurityManager#modifyUser(gov.nih.nci.security.authorization.domainobjects.User)
	 */
	public void modifyUser(User user) throws SMException
	{
		try
		{
			ProvisionManager.getInstance().getUserProvisioningManager().modifyUser(user);
			// Keep the IdP side in sync with the modified user when an IdP is configured.
			if (isIdpEnabled())
			{
				IdPManager idp = IdPManager.getInstance();
				idp.addUserToQueue(SecurityManagerPropertiesLocator.getInstance()
						.getApplicationCtxName(), user);
			}
		}
		catch (CSException exception)
		{
			String mesg = "Unable to modify user: Exception: ";
			Utility.getInstance().throwSMException(exception, mesg, "sm.operation.error");
		}
	}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.geo.builders;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.geo.GeoShapeType;
import org.elasticsearch.common.geo.parsers.ShapeParser;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.unit.DistanceUnit.Distance;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.spatial4j.shape.Circle;
import java.io.IOException;
import java.util.Objects;
public class CircleBuilder extends ShapeBuilder<Circle, CircleBuilder> {
    public static final ParseField FIELD_RADIUS = new ParseField("radius");
    public static final GeoShapeType TYPE = GeoShapeType.CIRCLE;
    // Unit in which `radius` is expressed; defaults to DistanceUnit.DEFAULT.
    private DistanceUnit unit = DistanceUnit.DEFAULT;
    private double radius;
    private Coordinate center;
    /**
     * Creates a circle centered at [0.0, 0.0].
     * Center can be changed by calling {@link #center(Coordinate)} later.
     */
    public CircleBuilder() {
        this.center = ZERO_ZERO;
    }
    /**
     * Read from a stream.
     */
    public CircleBuilder(StreamInput in) throws IOException {
        center(readFromStream(in));
        radius(in.readDouble(), DistanceUnit.readFromStream(in));
    }
    @Override
    public void writeTo(StreamOutput out) throws IOException {
        // Serialization order must mirror CircleBuilder(StreamInput): center, radius, unit.
        writeCoordinateTo(center, out);
        out.writeDouble(radius);
        unit.writeTo(out);
    }
    /**
     * Set the center of the circle
     *
     * @param center coordinate of the circles center
     * @return this
     */
    public CircleBuilder center(Coordinate center) {
        this.center = center;
        return this;
    }
    /**
     * set the center of the circle
     * @param lon longitude of the center
     * @param lat latitude of the center
     * @return this
     */
    public CircleBuilder center(double lon, double lat) {
        return center(new Coordinate(lon, lat));
    }
    /**
     * Get the center of the circle
     */
    public Coordinate center() {
        return center;
    }
    /**
     * Set the radius of the circle. The String value will be parsed by {@link DistanceUnit}
     * @param radius Value and unit of the circle combined in a string
     * @return this
     */
    public CircleBuilder radius(String radius) {
        return radius(DistanceUnit.Distance.parseDistance(radius));
    }
    /**
     * Set the radius of the circle
     * @param radius radius of the circle (see {@link org.elasticsearch.common.unit.DistanceUnit.Distance})
     * @return this
     */
    public CircleBuilder radius(Distance radius) {
        return radius(radius.value, radius.unit);
    }
    /**
     * Set the radius of the circle
     * @param radius value of the circles radius
     * @param unit unit name of the radius value (see {@link DistanceUnit})
     * @return this
     */
    public CircleBuilder radius(double radius, String unit) {
        return radius(radius, DistanceUnit.fromString(unit));
    }
    /**
     * Set the radius of the circle
     * @param radius value of the circles radius
     * @param unit unit of the radius value (see {@link DistanceUnit})
     * @return this
     */
    public CircleBuilder radius(double radius, DistanceUnit unit) {
        this.unit = unit;
        this.radius = radius;
        return this;
    }
    /**
     * Get the radius of the circle without unit
     */
    public double radius() {
        return this.radius;
    }
    /**
     * Get the radius unit of the circle
     */
    public DistanceUnit unit() {
        return this.unit;
    }
    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(ShapeParser.FIELD_TYPE.getPreferredName(), TYPE.shapeName());
        builder.field(FIELD_RADIUS.getPreferredName(), unit.toString(radius));
        builder.field(ShapeParser.FIELD_COORDINATES.getPreferredName());
        toXContent(builder, center);
        return builder.endObject();
    }
    @Override
    public Circle buildS4J() {
        // Convert the radius into degrees of arc: 360 * radius / earth circumference (in `unit`).
        return SPATIAL_CONTEXT.makeCircle(center.x, center.y, 360 * radius / unit.getEarthCircumference());
    }
    @Override
    public Object buildLucene() {
        throw new UnsupportedOperationException("CIRCLE geometry is not supported");
    }
    @Override
    public GeoShapeType type() {
        return TYPE;
    }
    @Override
    public String toWKT() {
        throw new UnsupportedOperationException("The WKT spec does not support CIRCLE geometry");
    }
    @Override
    public int numDimensions() {
        // A NaN z coordinate marks a 2D center; otherwise the circle is 3D.
        return Double.isNaN(center.z) ? 2 : 3;
    }
    @Override
    public int hashCode() {
        return Objects.hash(center, radius, unit.ordinal());
    }
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        CircleBuilder other = (CircleBuilder) obj;
        // Double.compare preserves the boxed-Double semantics previously obtained via
        // Objects.equals (NaN equals NaN, +0.0 != -0.0) without autoboxing; enum
        // constants are singletons, so identity comparison replaces the boxed
        // ordinal comparison.
        return Objects.equals(center, other.center) &&
            Double.compare(radius, other.radius) == 0 &&
            unit == other.unit;
    }
}
| |
package com.github.cukedoctor.util;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.*;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Pattern;
import static com.github.cukedoctor.util.Assert.hasText;
/**
* Created by pestano on 02/06/15.
*/
public class FileUtil {
    private final static Logger log = Logger.getLogger(FileUtil.class.getName());
    // Matches file names ending in .ad, .adoc, .asciidoc or .asc (case-insensitive extension).
    public static final Pattern ADOC_FILE_EXTENSION = Pattern.compile("([^\\s]+(\\.(?i)(ad|adoc|asciidoc|asc))$)");
    /**
     * Resolves the given path to an absolute path; when the file does not exist,
     * the path is resolved relative to the current working directory.
     *
     * @param path full path to the json feature result
     * @return absolute path to the json result file
     */
    public static String findJsonFile(String path) {
        if (path == null) {
            path = "";
        }
        File f = new File(path);
        if (f.exists()) {
            return f.getAbsolutePath();
        }
        //relative path
        if (path.startsWith("/")) {//remove slash to use relative paths
            path = path.substring(1);
        }
        return Paths.get(path.trim()).toAbsolutePath().toString();
    }
    /**
     * @param startDir initial directory to scan for features
     * @return all found json files path that represent cucumber features
     */
    public static List<String> findJsonFiles(String startDir) {
        return findFiles(startDir, ".json");
    }
    public static List<String> findFiles(String startDir, final String suffix) {
        return findFiles(startDir, suffix, false);
    }
    public static List<String> findFiles(String startDir, final String suffix, final boolean singleResult) {
        return findFiles(startDir, suffix, singleResult, null);
    }
    /**
     * Walks the file tree under startDir collecting paths whose file name ends with suffix.
     *
     * @param startDir     directory to scan; falls back to the working directory when missing
     * @param suffix       file name suffix to match (e.g. ".json")
     * @param singleResult when true, stop after the first match
     * @param relativePath when set, collected paths are relativized against this base
     * @return matching paths (absolute, or relative to relativePath)
     */
    public static List<String> findFiles(String startDir, final String suffix, final boolean singleResult, final String relativePath) {
        final List<String> foundPaths = new ArrayList<>();
        if (startDir == null) {
            startDir = "";
        }
        Path startPath = Paths.get(startDir);
        if (!Files.exists(startPath)) {
            if (startDir.startsWith("/")) {// try to find using relative paths
                startDir = startDir.substring(1);
                startPath = Paths.get(startDir);
            }
            if (!Files.exists(startPath)) {
                startPath = Paths.get("");
            }
        }
        try {
            Files.walkFileTree(startPath, new SimpleFileVisitor<Path>() {
                @Override
                public FileVisitResult visitFile(final Path file, final BasicFileAttributes attrs) throws IOException {
                    final String fileName = file.getFileName().toString();
                    if (fileName.endsWith(suffix)) {
                        if (hasText(relativePath)) {
                            boolean isFile = file.toFile().isFile();
                            Path computedPath = Paths.get(relativePath).relativize(isFile ? file.getParent().toAbsolutePath() : file.toAbsolutePath());
                            if (isFile) {
                                foundPaths.add(computedPath.toString() + "/" + file.toFile().getName());
                            } else {
                                foundPaths.add(computedPath.toString());
                            }
                        } else {
                            foundPaths.add(file.toAbsolutePath().toString());
                        }
                        if (singleResult) {
                            return FileVisitResult.TERMINATE;
                        }
                    }
                    if (attrs.isDirectory()) {
                        return super.visitFile(file, attrs);
                    } else {
                        // NOTE(review): SKIP_SUBTREE returned from visitFile behaves like
                        // CONTINUE per the FileVisitor contract; kept for compatibility.
                        return FileVisitResult.SKIP_SUBTREE;
                    }
                }
            });
        } catch (IOException e) {
            // Bug fix: pass the exception itself (was e.getMessage(), which the
            // log(Level, String, Object) overload silently ignored) so the stack trace is logged.
            log.log(Level.WARNING, "Problems scanning " + suffix + " files in path:" + startDir, e);
        }
        return foundPaths;
    }
    /**
     * Saves a file into filesystem. Note that name can be saved as absolute (if it has a leading slash) or relative to current path.
     * EX: /target/name.adoc will save the file into the target directory under the current path.
     *
     * @param name file name
     * @param data file content
     * @return the path of saved file
     */
    public static File saveFile(String name, String data) {
        if (name == null) {
            name = "";
        }
        String fullyQualifiedName = name;
        /**
         * if filename is not absolute use current path as base dir
         */
        if (!new File(fullyQualifiedName).isAbsolute()) {
            fullyQualifiedName = Paths.get("").toAbsolutePath().toString() + "/" + name;
        }
        try {
            //create subdirs (if there any)
            if (fullyQualifiedName.contains("/")) {
                File f = new File(fullyQualifiedName.substring(0, fullyQualifiedName.lastIndexOf("/")));
                f.mkdirs();
            }
            File file = new File(fullyQualifiedName);
            file.createNewFile();
            FileUtils.write(file, data, "UTF-8");
            log.info("Wrote: " + file.getAbsolutePath());
            return file;
        } catch (IOException e) {
            log.log(Level.SEVERE, "Could not create file " + name, e);
            return null;
        }
    }
    /**
     * Loads a file by absolute path, falling back to a path relative to the
     * current working directory when it does not exist.
     *
     * @param path file path (absolute or relative)
     * @return the file handle (may point to a non-existing file on the fallback path)
     */
    public static File loadFile(String path) {
        if (path == null) {
            path = "/";
        }
        File f = new File(path);
        if (f.exists()) {
            return f.getAbsoluteFile();
        }
        if (!path.startsWith("/")) {
            path = "/" + path;
        }
        return new File(Paths.get("").toAbsolutePath().toString() + path.trim());
    }
    /**
     * Deletes the file resolved by {@link #loadFile(String)}.
     *
     * @param path file path
     * @return true if the file was deleted
     */
    public static boolean removeFile(String path) {
        File fileToRemove = loadFile(path);
        return fileToRemove.delete();
    }
    /**
     * @param source resource from classpath
     * @param dest   dest path
     * @return copied file
     */
    public static File copyFileFromClassPath(String source, String dest) {
        if (source != null && dest != null) {
            // Bug fix: try-with-resources — the stream was previously leaked on every call.
            // UTF-8 is pinned explicitly to match saveFile's encoding instead of relying
            // on the platform default charset.
            try (InputStream in = FileUtil.class.getResourceAsStream(source)) {
                return saveFile(dest, IOUtils.toString(in, "UTF-8"));
            } catch (IOException e) {
                log.log(Level.SEVERE, "Could not copy source file: " + source + " to dest file: " + dest, e);
            }
        }
        return null;
    }
    /**
     * @param source file path
     * @param dest   dest path
     * @return copied file
     */
    public static File copyFile(String source, String dest) {
        if (source != null && dest != null) {
            File sourcefile = new File(source);
            if (!sourcefile.exists()) {
                log.warning(String.format("File %s not found.", sourcefile.getAbsolutePath()));
                return null;
            }
            // Bug fix: try-with-resources — the stream was previously leaked on every call.
            try (InputStream in = new FileInputStream(source)) {
                return saveFile(dest, IOUtils.toString(in, "UTF-8"));
            } catch (IOException e) {
                log.log(Level.SEVERE, "Could not copy source file: " + source + " to dest file: " + dest, e);
            }
        }
        return null;
    }
}
| |
/*
* Copyright 2015 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.core.reteoo;
import org.drools.core.base.ClassObjectType;
import org.drools.core.common.DefaultFactHandle;
import org.drools.core.common.EmptyBetaConstraints;
import org.drools.core.common.NetworkNode;
import org.drools.core.common.PropagationContextFactory;
import org.drools.core.definitions.rule.impl.RuleImpl;
import org.drools.core.impl.InternalKnowledgeBase;
import org.drools.core.impl.StatefulKnowledgeSessionImpl;
import org.drools.core.phreak.SegmentUtilities;
import org.drools.core.reteoo.builder.BuildContext;
import org.drools.core.rule.GroupElement;
import org.drools.core.rule.GroupElement.Type;
import org.drools.core.spi.PropagationContext;
import org.junit.Test;
import org.kie.api.KieBaseConfiguration;
import org.kie.internal.KnowledgeBaseFactory;
import org.kie.internal.builder.conf.RuleEngineOption;
import java.util.List;
import static org.junit.Assert.*;
/**
 * Tests rule (un)linking with segment memories over a hand-built Rete fragment:
 * three rules sharing a chain of beta nodes split into three segments
 * (lian..n3 -> rtn1, n4..n5 -> rtn2, n6..n8 -> rtn3).
 */
public class RuleUnlinkingWithSegmentMemoryTest {
    InternalKnowledgeBase kBase;
    BuildContext buildContext;
    PropagationContext context;
    // Network built by setUp(): lian feeds a chain n1..n8 with three terminal nodes.
    LeftInputAdapterNode lian;
    BetaNode n1;
    BetaNode n2;
    BetaNode n3;
    BetaNode n4;
    BetaNode n5;
    BetaNode n6;
    BetaNode n7;
    BetaNode n8;
    BetaNode n9;
    BetaNode n10;
    RuleTerminalNode rtn1;
    RuleTerminalNode rtn2;
    RuleTerminalNode rtn3;
    RuleImpl rule1;
    RuleImpl rule2;
    RuleImpl rule3;
    // Node-type selectors passed to createNetworkNode().
    static final int JOIN_NODE = 0;
    static final int EXISTS_NODE = 1;
    static final int NOT_NODE = 2;
    static final int RULE_TERMINAL_NODE = 3;
    /**
     * Builds and attaches a single node of the requested type, wired to the given
     * left input (and a fresh mock object source for beta nodes).
     */
    private NetworkNode createNetworkNode(int id,
                                          int type,
                                          LeftTupleSource leftTupleSource,
                                          RuleImpl rule) {
        MockObjectSource mockObjectSource = new MockObjectSource( 8 );
        LeftTupleSink networkNode = null;
        switch ( type ) {
            case JOIN_NODE : {
                networkNode = new JoinNode( id, leftTupleSource, mockObjectSource, new EmptyBetaConstraints(), buildContext );
                break;
            }
            case EXISTS_NODE : {
                networkNode = new ExistsNode( id, leftTupleSource, mockObjectSource, new EmptyBetaConstraints(), buildContext );
                break;
            }
            case NOT_NODE : {
                networkNode = new NotNode( id, leftTupleSource, mockObjectSource, new EmptyBetaConstraints(), buildContext );
                break;
            }
            case RULE_TERMINAL_NODE : {
                networkNode = new RuleTerminalNode( id, leftTupleSource, rule, new GroupElement( Type.AND ), 0, buildContext);
                break;
            }
        }
        mockObjectSource.attach();
        if ( NodeTypeEnums.isLeftTupleSource( networkNode ) ) {
            ((LeftTupleSource)networkNode).attach();
        } else {
            ((RuleTerminalNode)networkNode).attach();
        }
        return networkNode;
    }
    /**
     * Builds the shared network: rule1 terminates after n3, rule2 after n5,
     * rule3 after n8; node/rule associations mirror that sharing.
     */
    public void setUp(int type) {
        KieBaseConfiguration kconf = org.kie.internal.KnowledgeBaseFactory.newKnowledgeBaseConfiguration();
        kconf.setOption( RuleEngineOption.PHREAK );
        kBase = (InternalKnowledgeBase) KnowledgeBaseFactory.newKnowledgeBase(kconf);
        buildContext = new BuildContext( kBase, kBase.getReteooBuilder().getIdGenerator() );
        PropagationContextFactory pctxFactory = kBase.getConfiguration().getComponentFactory().getPropagationContextFactory();
        context = pctxFactory.createPropagationContext(0, PropagationContext.INSERTION, null, null, null);
        ObjectTypeNode otn = new ObjectTypeNode( 4, null, new ClassObjectType( String.class ), buildContext );
        lian = new LeftInputAdapterNode(5, otn, buildContext );
        n1 = (BetaNode) createNetworkNode( 10, type, lian, null );
        n2 = (BetaNode) createNetworkNode( 11, type, n1, null );
        n3 = (BetaNode) createNetworkNode( 12, type, n2, null );
        rule1 = new RuleImpl("rule1");
        rule1.setActivationListener( "agenda" );
        rtn1 = ( RuleTerminalNode ) createNetworkNode( 18, RULE_TERMINAL_NODE, n3, rule1);
        n4 = (BetaNode) createNetworkNode( 13, type, n3, null );
        n5 = (BetaNode) createNetworkNode( 14, type, n4, null );
        rule2 = new RuleImpl("rule2");
        rule2.setActivationListener( "agenda" );
        rtn2 = ( RuleTerminalNode ) createNetworkNode( 19, RULE_TERMINAL_NODE, n5, rule2 );
        n6 = (BetaNode) createNetworkNode( 15, type, n5, null );
        n7 = (BetaNode) createNetworkNode( 16, type, n6, null );
        n8 = (BetaNode) createNetworkNode( 17, type, n7, null );
        rule3 = new RuleImpl("rule3");
        rule3.setActivationListener( "agenda" );
        rtn3 = ( RuleTerminalNode ) createNetworkNode( 20, RULE_TERMINAL_NODE, n8, rule3 );
        // Associations reflect node sharing: lian..n3 serve all three rules,
        // n4..n5 serve rule2 and rule3, n6..n8 serve rule3 only.
        lian.addAssociation( rule1, null );
        lian.addAssociation( rule2, null );
        lian.addAssociation( rule3, null );
        n1.addAssociation( rule1, null );
        n1.addAssociation( rule2, null );
        n1.addAssociation( rule3, null );
        n2.addAssociation( rule1, null );
        n2.addAssociation( rule2, null );
        n2.addAssociation( rule3, null );
        n3.addAssociation( rule1, null );
        n3.addAssociation( rule2, null );
        n3.addAssociation( rule3, null );
        n4.addAssociation( rule2, null );
        n4.addAssociation( rule3, null );
        n5.addAssociation( rule2, null );
        n5.addAssociation( rule3, null );
        n6.addAssociation( rule3, null );
        n7.addAssociation( rule3, null );
        n8.addAssociation( rule3, null );
    }
    @Test
    public void testRuleSegmentsAllLinkedTestMasks() {
        setUp( JOIN_NODE );
        KieBaseConfiguration kconf = org.kie.internal.KnowledgeBaseFactory.newKnowledgeBaseConfiguration();
        kconf.setOption( RuleEngineOption.PHREAK );
        InternalKnowledgeBase kBase = (InternalKnowledgeBase) KnowledgeBaseFactory.newKnowledgeBase(kconf);
        StatefulKnowledgeSessionImpl wm = new StatefulKnowledgeSessionImpl( 1L, kBase );
        // Each rule path spans 1, 2 or 3 segments, hence masks 1 (0b1), 3 (0b11), 7 (0b111).
        PathMemory rs = (PathMemory) wm.getNodeMemory( rtn1 );
        assertFalse( rs.isRuleLinked() );
        assertEquals( 1, rs.getAllLinkedMaskTest() );
        rs = (PathMemory) wm.getNodeMemory( rtn2 );
        assertFalse( rs.isRuleLinked() );
        assertEquals( 3, rs.getAllLinkedMaskTest() );
        rs = (PathMemory) wm.getNodeMemory( rtn3 );
        assertFalse( rs.isRuleLinked() );
        assertEquals( 7, rs.getAllLinkedMaskTest() );
    }
    @Test
    public void testSegmentNodeReferencesToSegments() {
        setUp( JOIN_NODE );
        KieBaseConfiguration kconf = org.kie.internal.KnowledgeBaseFactory.newKnowledgeBaseConfiguration();
        kconf.setOption( RuleEngineOption.PHREAK );
        InternalKnowledgeBase kBase = (InternalKnowledgeBase) KnowledgeBaseFactory.newKnowledgeBase(kconf);
        StatefulKnowledgeSessionImpl wm = new StatefulKnowledgeSessionImpl( 1L, kBase );
        BetaMemory bm = null;
        List<PathMemory> list;
        PathMemory rtn1Rs = (PathMemory) wm.getNodeMemory( rtn1 );
        PathMemory rtn2Rs = (PathMemory) wm.getNodeMemory( rtn2 );
        PathMemory rtn3Rs = (PathMemory) wm.getNodeMemory( rtn3 );
        // lian
        SegmentUtilities.createSegmentMemory( lian, wm );
        LeftInputAdapterNode.LiaNodeMemory lmem = (LeftInputAdapterNode.LiaNodeMemory) wm.getNodeMemory( lian );
        assertEquals( 1, lmem.getNodePosMaskBit() );
        // n1
        SegmentUtilities.createSegmentMemory( n1, wm );
        bm = (BetaMemory) wm.getNodeMemory( n1 );
        assertEquals( 2, bm.getNodePosMaskBit() );
        assertEquals( 15, bm.getSegmentMemory().getAllLinkedMaskTest() );
        assertEquals( 1, bm.getSegmentMemory().getSegmentPosMaskBit() );
        list = bm.getSegmentMemory().getPathMemories();
        assertEquals( 3, list.size());
        assertTrue( list.contains( rtn1Rs ) );
        assertTrue( list.contains( rtn2Rs ) );
        assertTrue( list.contains( rtn3Rs ) );
        // n2
        bm = (BetaMemory) wm.getNodeMemory( n2 );
        assertEquals( 4, bm.getNodePosMaskBit() );
        assertEquals( 15, bm.getSegmentMemory().getAllLinkedMaskTest() );
        assertEquals( 1, bm.getSegmentMemory().getSegmentPosMaskBit() );
        list = bm.getSegmentMemory().getPathMemories();
        assertEquals( 3, list.size());
        assertTrue( list.contains( rtn1Rs ) );
        assertTrue( list.contains( rtn2Rs ) );
        assertTrue( list.contains( rtn3Rs ) );
        // n3
        bm = (BetaMemory) wm.getNodeMemory( n3 );
        assertEquals( 8, bm.getNodePosMaskBit() );
        assertEquals( 15, bm.getSegmentMemory().getAllLinkedMaskTest() );
        assertEquals( 1, bm.getSegmentMemory().getSegmentPosMaskBit() );
        list = bm.getSegmentMemory().getPathMemories();
        assertEquals( 3, list.size());
        assertTrue( list.contains( rtn1Rs ) );
        assertTrue( list.contains( rtn2Rs ) );
        assertTrue( list.contains( rtn3Rs ) );
        // n4
        SegmentUtilities.createSegmentMemory( n4, wm );
        bm = (BetaMemory) wm.getNodeMemory( n4 );
        assertEquals( 1, bm.getNodePosMaskBit() );
        assertEquals( 3, bm.getSegmentMemory().getAllLinkedMaskTest() );
        assertEquals( 2, bm.getSegmentMemory().getSegmentPosMaskBit() );
        list = bm.getSegmentMemory().getPathMemories();
        assertEquals( 2, list.size());
        assertTrue( list.contains( rtn2Rs ) );
        assertTrue( list.contains( rtn3Rs ) );
        // n5
        bm = (BetaMemory) wm.getNodeMemory( n5 );
        assertEquals( 2, bm.getNodePosMaskBit() );
        assertEquals( 3, bm.getSegmentMemory().getAllLinkedMaskTest() );
        assertEquals( 2, bm.getSegmentMemory().getSegmentPosMaskBit() );
        list = bm.getSegmentMemory().getPathMemories();
        assertEquals( 2, list.size());
        assertTrue( list.contains( rtn2Rs ) );
        assertTrue( list.contains( rtn3Rs ) );
        // n6
        SegmentUtilities.createSegmentMemory( n6, wm );
        bm = (BetaMemory) wm.getNodeMemory( n6 );
        assertEquals( 1, bm.getNodePosMaskBit() );
        assertEquals( 7, bm.getSegmentMemory().getAllLinkedMaskTest() );
        assertEquals( 4, bm.getSegmentMemory().getSegmentPosMaskBit() );
        list = bm.getSegmentMemory().getPathMemories();
        assertEquals( 1, list.size());
        assertTrue( list.contains( rtn3Rs ) );
        // n7
        bm = (BetaMemory) wm.getNodeMemory( n7 );
        assertEquals( 2, bm.getNodePosMaskBit() );
        assertEquals( 7, bm.getSegmentMemory().getAllLinkedMaskTest() );
        assertEquals( 4, bm.getSegmentMemory().getSegmentPosMaskBit() );
        list = bm.getSegmentMemory().getPathMemories();
        assertEquals( 1, list.size());
        assertTrue( list.contains( rtn3Rs ) );
        // n8
        bm = (BetaMemory) wm.getNodeMemory( n8 );
        assertEquals( 4, bm.getNodePosMaskBit() );
        assertEquals( 7, bm.getSegmentMemory().getAllLinkedMaskTest() );
        assertEquals( 4, bm.getSegmentMemory().getSegmentPosMaskBit() );
        list = bm.getSegmentMemory().getPathMemories();
        assertEquals( 1, list.size());
        assertTrue( list.contains( rtn3Rs ) );
    }
    @Test
    public void testRuleSegmentLinking() {
        setUp( JOIN_NODE );
        KieBaseConfiguration kconf = org.kie.internal.KnowledgeBaseFactory.newKnowledgeBaseConfiguration();
        kconf.setOption( RuleEngineOption.PHREAK );
        InternalKnowledgeBase kBase = (InternalKnowledgeBase) KnowledgeBaseFactory.newKnowledgeBase(kconf);
        StatefulKnowledgeSessionImpl wm = new StatefulKnowledgeSessionImpl( 1L, kBase );
        BetaMemory bm = null;
        List<PathMemory> list;
        PathMemory rtn1Rs = (PathMemory) wm.getNodeMemory( rtn1 );
        PathMemory rtn2Rs = (PathMemory) wm.getNodeMemory( rtn2 );
        PathMemory rtn3Rs = (PathMemory) wm.getNodeMemory( rtn3 );
        // Link every node except n2, n5, n6 and n7 — no rule can link yet.
        DefaultFactHandle f1 = (DefaultFactHandle) wm.insert( "test1" );
        lian.assertObject( f1, context, wm );
        n1.assertObject( f1, context, wm );
        n3.assertObject( f1, context, wm );
        n4.assertObject( f1, context, wm );
        n8.assertObject( f1, context, wm );
        assertFalse( rtn1Rs.isRuleLinked() );
        assertFalse( rtn2Rs.isRuleLinked() );
        assertFalse( rtn3Rs.isRuleLinked() );
        // Link in Rule1
        bm = (BetaMemory) wm.getNodeMemory( n2 );
        assertFalse( bm.getSegmentMemory().isSegmentLinked() );
        DefaultFactHandle f2 = (DefaultFactHandle) wm.insert( "test2" );
        n2.assertObject( f2, context, wm );
        assertTrue( bm.getSegmentMemory().isSegmentLinked() );
        assertTrue( rtn1Rs.isRuleLinked() );
        assertFalse( rtn2Rs.isRuleLinked() );
        assertFalse( rtn3Rs.isRuleLinked() );
        // Link in Rule2
        bm = (BetaMemory) wm.getNodeMemory( n5 );
        assertFalse( bm.getSegmentMemory().isSegmentLinked() );
        n5.assertObject( f1, context, wm );
        assertTrue( bm.getSegmentMemory().isSegmentLinked() );
        assertTrue( rtn1Rs.isRuleLinked() );
        assertTrue( rtn2Rs.isRuleLinked() );
        assertFalse( rtn3Rs.isRuleLinked() );
        // Link in Rule3
        n6.assertObject( f1, context, wm );
        n7.assertObject( f1, context, wm );
        assertTrue( bm.getSegmentMemory().isSegmentLinked() );
        assertTrue( rtn1Rs.isRuleLinked() );
        assertTrue( rtn2Rs.isRuleLinked() );
        assertTrue( rtn3Rs.isRuleLinked() );
        // retract n2, should unlink all rules
        n2.retractRightTuple( f2.getFirstRightTuple(), context, wm );
        assertFalse( rtn1Rs.isRuleLinked() );
        assertFalse( rtn2Rs.isRuleLinked() );
        assertFalse( rtn3Rs.isRuleLinked() );
    }
}
| |
/*
*
* * Copyright 2015 Skymind,Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
*
*/
package org.deeplearning4j.nn.layers;
import org.deeplearning4j.berkeley.Pair;
import org.deeplearning4j.exception.DL4JInvalidInputException;
import org.deeplearning4j.nn.api.Layer;
import org.deeplearning4j.nn.api.Updater;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.gradient.DefaultGradient;
import org.deeplearning4j.nn.gradient.Gradient;
import org.deeplearning4j.nn.params.DefaultParamInitializer;
import org.deeplearning4j.nn.params.PretrainParamInitializer;
import org.deeplearning4j.optimize.Solver;
import org.deeplearning4j.optimize.api.ConvexOptimizer;
import org.deeplearning4j.optimize.api.IterationListener;
import org.deeplearning4j.util.Dropout;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.indexing.NDArrayIndex;
import java.lang.reflect.Constructor;
import java.util.*;
/**
* A layer with a bias
* and activation function
* @author Adam Gibson
*/
public abstract class BaseLayer<LayerConfT extends org.deeplearning4j.nn.conf.layers.Layer>
implements Layer {
protected INDArray input;
protected INDArray paramsFlattened;
protected INDArray gradientsFlattened;
protected Map<String,INDArray> params;
protected transient Map<String,INDArray> gradientViews;
protected NeuralNetConfiguration conf;
protected INDArray dropoutMask;
protected boolean dropoutApplied = false;
protected double score = 0.0;
protected ConvexOptimizer optimizer;
protected Gradient gradient;
protected Collection<IterationListener> iterationListeners = new ArrayList<>();
protected int index = 0;
protected INDArray maskArray;
protected Solver solver;
    /**
     * Creates a layer from the given configuration; input must be supplied
     * later via {@link #setInput(INDArray)}.
     *
     * @param conf layer configuration
     */
    public BaseLayer(NeuralNetConfiguration conf) {
        this.conf = conf;
    }
public BaseLayer(NeuralNetConfiguration conf, INDArray input) {
this.input = input;
this.conf = conf;
}
    /**
     * Convenience accessor for this layer's typed configuration.
     * Performs an unchecked cast of the generic layer config to LayerConfT;
     * safe as long as the configuration was built for this layer type.
     */
    protected LayerConfT layerConf() {
        return (LayerConfT) this.conf.getLayer();
    }
    /** @return the current input activations (null if no input has been set) */
    public INDArray getInput() {
        return input;
    }
    @Override
    public void setInput(INDArray input) {
        this.input = input;
        // A new input invalidates any previously applied dropout.
        dropoutApplied = false;
    }
    /** @return this layer's index within the enclosing network */
    @Override
    public int getIndex() {
        return index;
    }
    /** Sets this layer's index within the enclosing network. */
    @Override
    public void setIndex(int index) {
        this.index = index;
    }
    @Override
    public Collection<IterationListener> getListeners() {
        // NOTE(review): returns the internal list directly, so callers can mutate it.
        return iterationListeners;
    }
@Override
public void setListeners(Collection<IterationListener> listeners) {
this.iterationListeners = listeners != null ? listeners : new ArrayList<IterationListener>();
}
@Override
public void setListeners(IterationListener... listeners) {
this.iterationListeners = new ArrayList<>();
for(IterationListener l : listeners)
iterationListeners.add(l);
}
@Override
public Gradient error(INDArray errorSignal) {
INDArray W = getParam(DefaultParamInitializer.WEIGHT_KEY);
Gradient nextLayerGradient = new DefaultGradient();
INDArray wErrorSignal = errorSignal.mmul(W.transpose());
nextLayerGradient.gradientForVariable().put(DefaultParamInitializer.WEIGHT_KEY,wErrorSignal);
return nextLayerGradient;
}
@Override
public INDArray derivativeActivation(INDArray input) {
INDArray deriv = Nd4j.getExecutioner().execAndReturn(Nd4j.getOpFactory().createTransform(conf().getLayer().getActivationFunction(), input).derivative());
return deriv;
}
@Override
public Gradient calcGradient(Gradient layerError, INDArray activation) {
Gradient ret = new DefaultGradient();
INDArray weightErrorSignal = layerError.getGradientFor(DefaultParamInitializer.WEIGHT_KEY);
INDArray weightError = weightErrorSignal.transpose().mmul(activation).transpose();
ret.gradientForVariable().put(DefaultParamInitializer.WEIGHT_KEY,weightError);
INDArray biasGradient = weightError.mean(0);
ret.gradientForVariable().put(DefaultParamInitializer.BIAS_KEY,biasGradient);
return ret;
}
    /**
     * Standard dense-layer backprop: delta = epsilon (.) f'(z), weight gradient
     * dL/dW = input^T * delta written into the flattened gradient view, and the
     * epsilon for the layer below.
     */
    @Override
    public Pair<Gradient,INDArray> backpropGradient(INDArray epsilon) {
        //If this layer is layer L, then epsilon is (w^(L+1)*(d^(L+1))^T) (or equivalent)
        INDArray z = preOutput(true); //Note: using preOutput(INDArray) can't be used as this does a setInput(input) and resets the 'appliedDropout' flag
        INDArray activationDerivative = Nd4j.getExecutioner().execAndReturn(Nd4j.getOpFactory().createTransform(conf().getLayer().getActivationFunction(), z).derivative());
        // muli mutates the caller-supplied epsilon in place.
        INDArray delta = epsilon.muli(activationDerivative);
        if(maskArray != null){
            delta.muliColumnVector(maskArray);
        }
        Gradient ret = new DefaultGradient();
        // gemm writes input^T * delta directly into the pre-allocated gradient view.
        INDArray weightGrad = gradientViews.get(DefaultParamInitializer.WEIGHT_KEY); //f order
        Nd4j.gemm(input,delta,weightGrad,true,false,1.0,0.0);
        INDArray biasGrad = gradientViews.get(DefaultParamInitializer.BIAS_KEY);
        biasGrad.assign(delta.sum(0)); //TODO: do this without the assign
        ret.gradientForVariable().put(DefaultParamInitializer.WEIGHT_KEY, weightGrad);
        ret.gradientForVariable().put(DefaultParamInitializer.BIAS_KEY, biasGrad);
        // Error signal for the previous layer: (W * delta^T)^T.
        INDArray epsilonNext = params.get(DefaultParamInitializer.WEIGHT_KEY).mmul(delta.transpose()).transpose();
        return new Pair<>(ret,epsilonNext);
    }
    /** Fits the layer on the input most recently supplied via {@link #setInput(INDArray)}. */
    public void fit() {
        fit(this.input);
    }
@Override
public void computeGradientAndScore() {
if (this.input == null)
return;
INDArray output = activate(true);
setScoreWithZ(output);
}
    /**
     * Hook for subclasses to derive the layer score from forward-pass output z;
     * the base implementation intentionally does nothing.
     */
    protected void setScoreWithZ(INDArray z) {
    }
@Override
public INDArray preOutput(INDArray x, TrainingMode training) {
return preOutput(x,training == TrainingMode.TRAIN);
}
@Override
public INDArray activate(TrainingMode training) {
return activate(training == TrainingMode.TRAIN);
}
@Override
public INDArray activate(INDArray input, TrainingMode training) {
return activate(input,training == TrainingMode.TRAIN);
}
    /**
     * Objective function: the specified objective
     * @return the score for the objective
     */
    @Override
    public double score() {
        return score;
    }
    /** @return the gradient stored from the most recent computation (may be null) */
    @Override
    public Gradient gradient() {
        return gradient;
    }
    /**
     * iterate one iteration of the network
     *
     * @param input the input to iterate on
     */
    @Override
    public void iterate(INDArray input) {
        setInput(input.dup());
        applyDropOutIfNecessary(true);
        // NOTE(review): gradient() returns the stored gradient field; nothing here
        // recomputes it for the new input — confirm this is intended.
        Gradient gradient = gradient();
        for(String paramType : gradient.gradientForVariable().keySet()) {
            update(gradient.getGradientFor(paramType), paramType);
        }
    }
@Override
public void update(Gradient gradient) {
for(String paramType : gradient.gradientForVariable().keySet()) {
update(gradient.getGradientFor(paramType), paramType);
}
}
    /**
     * Adds a single gradient to the named parameter.
     * Note: addi mutates the parameter array in place before setParam re-assigns it.
     */
    @Override
    public void update(INDArray gradient, String paramType) {
        setParam(paramType, getParam(paramType).addi(gradient));
    }
@Override
public ConvexOptimizer getOptimizer() {
if(optimizer == null) {
Solver solver = new Solver.Builder()
.model(this).configure(conf())
.build();
this.optimizer = solver.getOptimizer();
}
return optimizer;
}
    /** Replaces this layer's configuration. */
    @Override
    public void setConf(NeuralNetConfiguration conf) {
        this.conf = conf;
    }
    /**Returns the parameters of the neural network as a flattened row vector
     * @return the parameters of the neural network
     */
    @Override
    public INDArray params() {
        // NOTE(review): concatenation order follows params.values() iteration order —
        // relies on the backing map preserving insertion order; confirm.
        return Nd4j.toFlattened('f',params.values());
    }
    /** @return the parameter array for the given key, or null if absent */
    @Override
    public INDArray getParam(String param) {
        return params.get(param);
    }
@Override
public void setParam(String key, INDArray val) {
if(params.containsKey(key)) params.get(key).assign(val);
else params.put(key, val);
}
    /**
     * Copies the given flattened parameters into this layer; skipped entirely
     * when the argument is the layer's own backing view.
     */
    @Override
    public void setParams(INDArray params) {
        if(params == paramsFlattened) return; //no op
        setParams(params,'f');
    }
    /**
     * Copies a flattened parameter vector into the individual parameter arrays.
     *
     * @param params row vector containing all parameters, concatenated
     * @param order  ordering ('c'/'f') used when reshaping each slice
     * @throws IllegalArgumentException if the total length does not match
     * @throws IllegalStateException if a slice length disagrees with its parameter
     */
    protected void setParams(INDArray params, char order){
        List<String> parameterList = conf.variables();
        int length = 0;
        for(String s : parameterList)
            length += getParam(s).length();
        if(params.length() != length)
            throw new IllegalArgumentException("Unable to set parameters: must be of length " + length);
        int idx = 0;
        // NOTE(review): slicing order follows this.params.keySet() iteration order —
        // relies on the backing map preserving insertion order; confirm.
        Set<String> paramKeySet = this.params.keySet();
        for(String s : paramKeySet) {
            INDArray param = getParam(s);
            INDArray get = params.get(NDArrayIndex.point(0),NDArrayIndex.interval(idx, idx + param.length()));
            if(param.length() != get.length())
                throw new IllegalStateException("Parameter " + s + " should have been of length " + param.length() + " but was " + get.length());
            param.assign(get.reshape(order,param.shape())); //Use assign due to backprop params being a view of a larger array
            idx += param.length();
        }
    }
@Override
public void setParamsViewArray(INDArray params){
if(this.params != null && params.length() != numParams()) throw new IllegalArgumentException("Invalid input: expect params of length " + numParams()
+ ", got params of length " + params.length());
this.paramsFlattened = params;
}
@Override
public void setBackpropGradientsViewArray(INDArray gradients) {
if(this.params != null && gradients.length() != numParams()) throw new IllegalArgumentException("Invalid input: expect gradients array of length " + numParams(true)
+ ", got params of length " + gradients.length());
this.gradientsFlattened = gradients;
this.gradientViews = conf.getLayer().initializer().getGradientsFromFlattened(conf,gradients);
}
    /** Replaces the parameter table; arrays are referenced, not copied. */
    @Override
    public void setParamTable(Map<String, INDArray> paramTable) {
        this.params = paramTable;
    }
    /** Not supported here; parameters are initialized externally via the param initializer. */
    @Override
    public void initParams() {
        // paramInitializer.init(paramTable(), conf());
        throw new UnsupportedOperationException("Not yet implemented");
    }
    /** @return the internal parameter table (not a defensive copy) */
    @Override
    public Map<String, INDArray> paramTable() {
        return params;
    }
/**
 * Set the given input on this layer and compute the pre-activation output.
 *
 * @param x        input array; must not be null
 * @param training whether this is a training-time forward pass
 * @throws IllegalArgumentException if x is null
 */
@Override
public INDArray preOutput(INDArray x, boolean training) {
    if (x == null) {
        throw new IllegalArgumentException("No null input allowed");
    }
    setInput(x);
    return preOutput(training);
}
/**
 * Compute the pre-activation (linear) output for the current input:
 * input * W + b, with dropout/drop-connect applied as configured,
 * and an optional per-example mask applied to the result.
 *
 * @param training whether this is a training-time pass (enables dropout)
 * @throws DL4JInvalidInputException if the input is not rank 2 or its column
 *         count does not match the layer's input size
 */
public INDArray preOutput(boolean training) {
// Standard dropout is applied to the input (at most once), only when training
applyDropOutIfNecessary(training);
INDArray b = getParam(DefaultParamInitializer.BIAS_KEY);
INDArray W = getParam(DefaultParamInitializer.WEIGHT_KEY);
//Input validation:
if(input.rank() != 2 || input.columns() != W.rows() ){
if(input.rank() != 2){
throw new DL4JInvalidInputException("Input that is not a matrix; expected matrix (rank 2), got rank " + input.rank()
+ " array with shape " + Arrays.toString(input.shape()));
}
throw new DL4JInvalidInputException("Input size (" + input.columns() + " columns; shape = " + Arrays.toString(input.shape())
+ ") is invalid: does not match layer input size (layer # inputs = " + W.size(0) + ")");
}
// Drop-connect (dropping weights rather than inputs) replaces W for this pass
if(conf.isUseDropConnect() && training && conf.getLayer().getDropOut() > 0) {
W = Dropout.applyDropConnect(this,DefaultParamInitializer.WEIGHT_KEY);
}
INDArray ret = input.mmul(W).addiRowVector(b);
if(maskArray != null){
// Zero out masked examples (mask is applied per row/example)
ret.muliColumnVector(maskArray);
}
return ret;
}
/**
 * Forward pass: compute pre-activations then apply the configured
 * element-wise activation function, masking the result if a mask is set.
 */
@Override
public INDArray activate(boolean training) {
    INDArray preAct = preOutput(training);
    INDArray activation = Nd4j.getExecutioner().execAndReturn(
            Nd4j.getOpFactory().createTransform(
                    conf.getLayer().getActivationFunction(), preAct, conf.getExtraArgs()));
    if (maskArray != null) {
        // Zero out masked examples
        activation.muliColumnVector(maskArray);
    }
    return activation;
}
/** Set the input and activate; defaults to training mode. */
@Override
public INDArray activate(INDArray input) {
    setInput(input);
    return activate(true);
}
/** Set the input and activate in the given (training/test) mode. */
@Override
public INDArray activate(INDArray input, boolean training) {
    setInput(input);
    return activate(training);
}
/** Activate using the current input, in test (non-training) mode. */
@Override
public INDArray activate() {
    return activate(false);
}
/**
 * Compute the pre-activation (linear) output for the given input,
 * in training mode.
 *
 * @param x the input (a matrix with one example per row, or a vector)
 * @return the pre-activation values, one row per example
 */
@Override
public INDArray preOutput(INDArray x) {
    return preOutput(x, true);
}
/**
 * L2 regularization term for this layer: 0.5 * l2 * ||W||_2^2.
 * Returns 0 when regularization is disabled or the l2 coefficient is not positive.
 */
@Override
public double calcL2() {
    if (!conf.isUseRegularization() || conf.getLayer().getL2() <= 0.0) {
        return 0.0;
    }
    double weightNorm = getParam(DefaultParamInitializer.WEIGHT_KEY).norm2Number().doubleValue();
    // Squared L2 norm of the weights, scaled by 0.5 * l2 coefficient
    return 0.5 * conf.getLayer().getL2() * weightNorm * weightNorm;
}
/**
 * L1 regularization term for this layer: l1 * ||W||_1.
 * Returns 0 when regularization is disabled or the l1 coefficient is not positive.
 */
@Override
public double calcL1() {
    if (!conf.isUseRegularization() || conf.getLayer().getL1() <= 0.0) {
        return 0.0;
    }
    double weightAbsSum = getParam(DefaultParamInitializer.WEIGHT_KEY).norm1Number().doubleValue();
    return conf.getLayer().getL1() * weightAbsSum;
}
/** @return the number of examples in the current input (size of dimension 0) */
@Override
public int batchSize() {
    return input.size(0);
}
/** Linear output for the current input: input * W + b (no activation function applied). */
@Override
public INDArray activationMean() {
    INDArray bias = getParam(DefaultParamInitializer.BIAS_KEY);
    INDArray weights = getParam(DefaultParamInitializer.WEIGHT_KEY);
    return input().mmul(weights).addiRowVector(bias);
}
/** @return this layer's configuration */
@Override
public NeuralNetConfiguration conf() {
    return conf;
}
/** Release the current input: destroy its underlying buffer and drop the reference. */
@Override
public void clear() {
    if (input == null) {
        return;
    }
    input.data().destroy();
    input = null;
}
/**
 * Apply standard dropout to the input, at most once per input, only during
 * training, and only when drop-connect is not configured instead.
 */
protected void applyDropOutIfNecessary(boolean training) {
    boolean shouldApply = training
            && !dropoutApplied
            && conf.getLayer().getDropOut() > 0
            && !conf.isUseDropConnect();
    if (shouldApply) {
        Dropout.applyDropout(input, conf.getLayer().getDropOut());
        dropoutApplied = true; // guard against double application on the same input
    }
}
/**
 * Averages the given logistic regression from a mini batch into this layer
 * @param l the logistic regression layer to average into this layer
 * @param batchSize the batch size
 */
@Override
public void merge(Layer l, int batchSize) {
// NOTE(review): divi() divides l's parameters IN PLACE, mutating the other
// layer as a side effect — confirm callers do not reuse l afterwards.
setParams(params().addi(l.params().divi(batchSize)));
// Recompute gradient/score to reflect the merged parameters
computeGradientAndScore();
}
/**
 * Clone this layer: construct a new instance via the (NeuralNetConfiguration)
 * constructor, then install a deep copy of the parameter table.
 *
 * @return a new layer with duplicated parameters
 * @throws RuntimeException if reflective construction fails
 */
@Override
public Layer clone() {
    try {
        Constructor c = getClass().getConstructor(NeuralNetConfiguration.class);
        Layer layer = (Layer) c.newInstance(conf);
        // Deep-copy parameters so the clone does not share backing arrays
        Map<String, INDArray> linkedTable = new LinkedHashMap<>();
        for (Map.Entry<String, INDArray> entry : params.entrySet()) {
            linkedTable.put(entry.getKey(), entry.getValue().dup());
        }
        layer.setParamTable(linkedTable);
        return layer;
    } catch (Exception e) {
        // Fix: previously the exception was swallowed (printStackTrace) and null
        // was returned, deferring an NPE to the caller. Fail fast, preserving the cause.
        throw new RuntimeException("Unable to clone layer", e);
    }
}
/** @return the layer type; this is a feed-forward layer */
@Override
public Type type() {
    return Type.FEED_FORWARD;
}
/**
 * The number of parameters for the model.
 *
 * @return the total length of all parameter arrays in the parameter table
 */
@Override
public int numParams() {
    int total = 0;
    for (INDArray array : params.values()) {
        total += array.length();
    }
    return total;
}
/** @return parameter count; the backwards flag is ignored for this layer */
@Override
public int numParams(boolean backwards) {
    return numParams();
}
/**
 * Fit this layer on the given input: sets (a copy of) the input, applies
 * dropout, lazily constructs the solver (including the updater state view
 * array), and runs the optimizer.
 *
 * @param input training input; may be null, in which case the existing input is used
 */
@Override
public void fit(INDArray input) {
if(input != null) {
// dup() so optimization does not mutate the caller's array
setInput(input.dup());
applyDropOutIfNecessary(true);
}
if(solver == null){
solver = new Solver.Builder()
.model(this).configure(conf()).listeners(getListeners())
.build();
//Set the updater state view array. For MLN and CG, this is done by MultiLayerUpdater and ComputationGraphUpdater respectively
Updater updater = solver.getOptimizer().getUpdater();
int updaterStateSize = updater.stateSizeForLayer(this);
if(updaterStateSize > 0) updater.setStateViewArray(this, Nd4j.createUninitialized(new int[]{1,updaterStateSize},Nd4j.order()), true);
}
this.optimizer = solver.getOptimizer();
solver.optimize();
}
/** @return the most recently computed gradient and score, paired */
@Override
public Pair<Gradient, Double> gradientAndScore() {
    Gradient g = gradient();
    Double s = score();
    return new Pair<>(g, s);
}
/** @return the current input array (may be null if cleared) */
@Override
public INDArray input() {
    return input;
}
/** No input validation is performed for this layer type. */
@Override
public void validateInput() {
    // intentionally a no-op
}
/**
 * Create a gradient list based on the passed in parameters.
 * Will throw an IllegalArgumentException if the number of gradient matrices
 * isn't equal to the number of keys in the parameter list
 * @param gradients the gradients to create from
 * @return the create based on the passed in ndarrays
 */
protected Gradient createGradient(INDArray... gradients) {
    // Hoisted: conf.variables() was previously re-fetched on every loop iteration
    List<String> variables = conf.variables();
    if (gradients.length != variables.size())
        throw new IllegalArgumentException("Unable to create gradients...not equal to number of parameters");
    Gradient ret = new DefaultGradient();
    for (int i = 0; i < gradients.length; i++) {
        INDArray paramI = getParam(variables.get(i));
        // Each gradient must match the shape of its corresponding parameter
        if (!Arrays.equals(paramI.shape(), gradients[i].shape()))
            throw new IllegalArgumentException("Gradient at index " + i + " had wrong gradient size of " + Arrays.toString(gradients[i].shape()) + " when should have been " + Arrays.toString(paramI.shape()));
        ret.gradientForVariable().put(variables.get(i), gradients[i]);
    }
    return ret;
}
/** Debug representation listing config, dropout mask, score, optimizer and listeners. */
@Override
public String toString() {
    StringBuilder sb = new StringBuilder(getClass().getName());
    sb.append("{");
    sb.append("conf=").append(conf);
    sb.append(", dropoutMask=").append(dropoutMask);
    sb.append(", score=").append(score);
    sb.append(", optimizer=").append(optimizer);
    sb.append(", listeners=").append(iterationListeners);
    sb.append('}');
    return sb.toString();
}
/**
 * Build a transposed copy of this layer: nIn and nOut are swapped, the weight
 * matrix is transposed, and for pretrain layers the hidden and visible biases
 * are swapped. Only supported for feed-forward layer configurations.
 *
 * @return a new layer with transposed weights and swapped in/out sizes
 * @throws UnsupportedOperationException if the layer config is not feed-forward
 * @throws RuntimeException if construction of the transposed layer fails
 */
@Override
public Layer transpose() {
if(!(conf.getLayer() instanceof org.deeplearning4j.nn.conf.layers.FeedForwardLayer))
throw new UnsupportedOperationException("unsupported layer type: " + conf.getLayer().getClass().getName());
INDArray w = getParam(DefaultParamInitializer.WEIGHT_KEY);
INDArray b = getParam(DefaultParamInitializer.BIAS_KEY);
// Visible bias only exists for pretrain layers; null otherwise
INDArray vb = getParam(PretrainParamInitializer.VISIBLE_BIAS_KEY);
Layer layer;
try {
NeuralNetConfiguration clone = conf.clone(); // assume a deep clone here
org.deeplearning4j.nn.conf.layers.FeedForwardLayer clonedLayerConf =
(org.deeplearning4j.nn.conf.layers.FeedForwardLayer) clone.getLayer();
// Swap input and output sizes for the transposed layer
int nIn = clonedLayerConf.getNOut();
int nOut = clonedLayerConf.getNIn();
clonedLayerConf.setNIn(nIn);
clonedLayerConf.setNOut(nOut);
//Need to swap the hidden and visible biases for pretrain layers
INDArray newB;
INDArray newVB = null;
int totalParams = w.length();
if(vb != null){
newB = vb.dup();
newVB = b.dup();
totalParams += newB.length() + newVB.length();
} else {
// No visible bias: fresh zero bias of the transposed output size
newB = Nd4j.create(1,nOut);
totalParams += newB.length();
}
// Flat view array sized for all parameters of the transposed layer
INDArray paramsView = Nd4j.create(1,totalParams);
layer = clone.getLayer().instantiate(clone, iterationListeners, this.index, paramsView, true);
layer.setParam(DefaultParamInitializer.WEIGHT_KEY,w.transpose().dup());
layer.setParam(DefaultParamInitializer.BIAS_KEY,newB);
if(vb != null) layer.setParam(PretrainParamInitializer.VISIBLE_BIAS_KEY, newVB);
} catch (Exception e) {
throw new RuntimeException("unable to construct transposed layer", e);
}
return layer;
}
/** Add the given value to this layer's accumulated score. */
@Override
public void accumulateScore(double accum) {
    score += accum;
}
/** Mini-batch size is derived from the input here; this setter is a no-op. */
@Override
public void setInputMiniBatchSize(int size) {
    // intentionally empty
}
/** @return the mini-batch size of the current input (size of dimension 0) */
@Override
public int getInputMiniBatchSize() {
    return input.size(0);
}
/** Scale every per-parameter learning rate by (policy decay rate + epsilon). */
@Override
public void applyLearningRateScoreDecay() {
    double decayFactor = conf.getLrPolicyDecayRate() + Nd4j.EPS_THRESHOLD;
    for (Map.Entry<String, Double> entry : conf.getLearningRateByParam().entrySet()) {
        conf.setLearningRateByParam(entry.getKey(), entry.getValue() * decayFactor);
    }
}
/** Set the per-example mask array applied to outputs (may be null to disable). */
@Override
public void setMaskArray(INDArray maskArray) {
    this.maskArray = maskArray;
}
/** @return the current per-example mask array, or null if none is set */
@Override
public INDArray getMaskArray() {
    return maskArray;
}
}
| |
/*
* Copyright 2017 StreamSets Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.streamsets.pipeline.stage.origin.tcp;
import com.google.common.primitives.Bytes;
import com.streamsets.pipeline.api.ErrorCode;
import com.streamsets.pipeline.api.OnRecordError;
import com.streamsets.pipeline.api.Record;
import com.streamsets.pipeline.api.Stage;
import com.streamsets.pipeline.api.StageException;
import com.streamsets.pipeline.config.DataFormat;
import com.streamsets.pipeline.lib.parser.net.NetTestUtils;
import com.streamsets.pipeline.lib.parser.net.syslog.SyslogFramingMode;
import com.streamsets.pipeline.lib.parser.net.syslog.SyslogMessage;
import com.streamsets.pipeline.lib.parser.text.TextDataParserFactory;
import com.streamsets.pipeline.lib.tls.TlsConfigErrors;
import com.streamsets.pipeline.sdk.PushSourceRunner;
import com.streamsets.pipeline.stage.common.DataFormatErrors;
import com.streamsets.pipeline.stage.util.tls.TLSTestUtils;
import com.streamsets.testing.NetworkUtils;
import io.netty.bootstrap.Bootstrap;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelHandler;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelOption;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.embedded.EmbeddedChannel;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.nio.NioSocketChannel;
import org.apache.avro.ipc.NettyTransceiver;
import org.apache.avro.ipc.specific.SpecificRequestor;
import org.apache.commons.io.Charsets;
import org.apache.commons.lang3.StringUtils;
import org.apache.flume.source.avro.AvroFlumeEvent;
import org.apache.flume.source.avro.AvroSourceProtocol;
import org.apache.flume.source.avro.Status;
import org.junit.Assert;
import org.junit.Test;
import java.io.File;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.security.KeyPair;
import java.security.cert.Certificate;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.LinkedBlockingDeque;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.CoreMatchers.startsWith;
import static org.hamcrest.Matchers.empty;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.collection.IsMapContaining.hasKey;
import static com.streamsets.testing.Matchers.fieldWithValue;
/**
 * Tests for the TCP server origin: syslog decoding (both framing modes),
 * config validation, delimited-record acknowledgements, on-record-error
 * handling modes, and Flume Avro IPC mode.
 */
public class TestTCPServerSource {
// Ten newline-delimited text records used by the delimited-records tests
public static final String TEN_DELIMITED_RECORDS = "one\ntwo\nthree\nfour\nfive\nsix\nseven\neight\nnine\nten\n";
// Syslog message with priority 42 (facility 5, severity 2)
public static final String SYSLOG_RECORD = "<42>Mar 24 17:18:10 10.1.2.34 Got an error";
// Decodes one syslog record under each framing mode via an embedded channel.
@Test
public void syslogRecords() {
Charset charset = Charsets.ISO_8859_1;
final TCPServerSourceConfig configBean = createConfigBean(charset);
TCPServerSource source = new TCPServerSource(configBean);
List<Stage.ConfigIssue> issues = new LinkedList<>();
// Non-transparent framing: record terminated by the configured separator char
EmbeddedChannel ch = new EmbeddedChannel(source.buildByteBufToMessageDecoderChain(issues).toArray(new ChannelHandler[0]));
ch.writeInbound(Unpooled.copiedBuffer(SYSLOG_RECORD + configBean.nonTransparentFramingSeparatorCharStr, charset));
assertSyslogRecord(ch);
assertFalse(ch.finishAndReleaseAll());
// Octet counting framing: "<length> <record>"
configBean.syslogFramingMode = SyslogFramingMode.OCTET_COUNTING;
EmbeddedChannel ch2 = new EmbeddedChannel(source.buildByteBufToMessageDecoderChain(issues).toArray(new ChannelHandler[0]));
ch2.writeInbound(Unpooled.copiedBuffer(SYSLOG_RECORD.length() + " " + SYSLOG_RECORD, charset));
assertSyslogRecord(ch2);
assertFalse(ch2.finishAndReleaseAll());
}
// Asserts the decoded inbound message matches SYSLOG_RECORD's parsed fields.
private void assertSyslogRecord(EmbeddedChannel ch) {
Object in1 = ch.readInbound();
assertThat(in1, notNullValue());
assertThat(in1, instanceOf(SyslogMessage.class));
SyslogMessage msg1 = (SyslogMessage) in1;
assertThat(msg1.getHost(), equalTo("10.1.2.34"));
assertThat(msg1.getRemainingMessage(), equalTo("Got an error"));
// Priority 42 => facility 42/8 = 5, severity 42%8 = 2
assertThat(msg1.getPriority(), equalTo(42));
assertThat(msg1.getFacility(), equalTo(5));
assertThat(msg1.getSeverity(), equalTo(2));
}
// Walks through init-time config validation: ports, TLS, ack ELs, and per-mode settings.
@Test
public void initMethod() throws Exception {
final TCPServerSourceConfig configBean = createConfigBean(Charsets.ISO_8859_1);
initSourceAndValidateIssues(configBean);
// empty ports
configBean.ports = new LinkedList<>();
initSourceAndValidateIssues(configBean, Errors.TCP_02);
// invalid ports
// too large
configBean.ports = Arrays.asList("123456789");
initSourceAndValidateIssues(configBean, Errors.TCP_03);
// not a number
configBean.ports = Arrays.asList("abcd");
initSourceAndValidateIssues(configBean, Errors.TCP_03);
// start TLS config tests
configBean.ports = randomSinglePort();
configBean.tlsConfigBean.tlsEnabled = true;
configBean.tlsConfigBean.keyStoreFilePath = "non-existent-file-path";
initSourceAndValidateIssues(configBean, TlsConfigErrors.TLS_01);
// NOTE(review): suffix "txt" has no leading dot, so the temp file is named
// e.g. "blank...txt" without an extension separator — harmless here, but confirm.
File blankTempFile = File.createTempFile("blank", "txt");
blankTempFile.deleteOnExit();
configBean.tlsConfigBean.keyStoreFilePath = blankTempFile.getAbsolutePath();
initSourceAndValidateIssues(configBean, TlsConfigErrors.TLS_21);
// now, try with real keystore
String hostname = TLSTestUtils.getHostname();
File testDir = new File("target", UUID.randomUUID().toString()).getAbsoluteFile();
testDir.deleteOnExit();
final File keyStore = new File(testDir, "keystore.jks");
keyStore.deleteOnExit();
Assert.assertTrue(testDir.mkdirs());
final String keyStorePassword = "keystore";
KeyPair keyPair = TLSTestUtils.generateKeyPair();
Certificate cert = TLSTestUtils.generateCertificate("CN=" + hostname, keyPair, 30);
TLSTestUtils.createKeyStore(keyStore.toString(), keyStorePassword, "web", keyPair.getPrivate(), cert);
configBean.tlsConfigBean.keyStoreFilePath = keyStore.getAbsolutePath();
configBean.tlsConfigBean.keyStorePassword = () -> "invalid-password";
initSourceAndValidateIssues(configBean, TlsConfigErrors.TLS_21);
// finally, a valid certificate/config
configBean.tlsConfigBean.keyStorePassword = () -> keyStorePassword;
initSourceAndValidateIssues(configBean);
// ack ELs
configBean.recordProcessedAckMessage = "${invalid EL)";
initSourceAndValidateIssues(configBean, Errors.TCP_30);
configBean.recordProcessedAckMessage = "${time:now()}";
configBean.batchCompletedAckMessage = "${another invalid EL]";
initSourceAndValidateIssues(configBean, Errors.TCP_31);
configBean.batchCompletedAckMessage = "${record:value('/first')}";
// syslog mode
configBean.tcpMode = TCPMode.SYSLOG;
configBean.syslogFramingMode = SyslogFramingMode.NON_TRANSPARENT_FRAMING;
configBean.nonTransparentFramingSeparatorCharStr = "";
initSourceAndValidateIssues(configBean, Errors.TCP_40);
configBean.syslogFramingMode = SyslogFramingMode.OCTET_COUNTING;
initSourceAndValidateIssues(configBean);
// separated records
configBean.tcpMode = TCPMode.DELIMITED_RECORDS;
configBean.dataFormatConfig.charset = Charsets.UTF_8.name();
initSourceAndValidateIssues(configBean, Errors.TCP_41);
configBean.recordSeparatorStr = "";
initSourceAndValidateIssues(configBean, Errors.TCP_40);
configBean.recordSeparatorStr = "x";
initSourceAndValidateIssues(configBean, DataFormatErrors.DATA_FORMAT_12);
configBean.dataFormat = DataFormat.TEXT;
initSourceAndValidateIssues(configBean);
}
// End-to-end: sends ten delimited text records over a real TCP connection and
// verifies both the emitted records and the per-record/per-batch ack messages.
@Test
public void runTextRecordsWithAck() throws StageException, IOException, ExecutionException, InterruptedException {
final String recordSeparatorStr = "\n";
final String[] expectedRecords = TEN_DELIMITED_RECORDS.split(recordSeparatorStr);
final int batchSize = expectedRecords.length;
final Charset charset = Charsets.ISO_8859_1;
final TCPServerSourceConfig configBean = createConfigBean(charset);
configBean.dataFormat = DataFormat.TEXT;
configBean.tcpMode = TCPMode.DELIMITED_RECORDS;
configBean.recordSeparatorStr = recordSeparatorStr;
configBean.ports = NetworkUtils.getRandomPorts(1);
configBean.recordProcessedAckMessage = "record_ack_${record:id()}";
configBean.batchCompletedAckMessage = "batch_ack_${batchSize}";
configBean.batchSize = batchSize;
final TCPServerSource source = new TCPServerSource(configBean);
final String outputLane = "lane";
final PushSourceRunner runner = new PushSourceRunner.Builder(TCPServerDSource.class, source)
.addOutputLane(outputLane)
.build();
final List<Record> records = new LinkedList<>();
runner.runInit();
EventLoopGroup workerGroup = new NioEventLoopGroup();
// Client writes the test data in randomly-sized slices to exercise reassembly
ChannelFuture channelFuture = startTcpClient(
configBean,
workerGroup,
TEN_DELIMITED_RECORDS.getBytes(charset),
true
);
final Channel channel = channelFuture.channel();
TCPServerSourceClientHandler clientHandler = channel.pipeline().get(TCPServerSourceClientHandler.class);
runner.runProduce(new HashMap<>(), batchSize, output -> {
records.addAll(output.getRecords().get(outputLane));
runner.setStop();
});
// Wait until the connection is closed.
runner.waitOnProduce();
final List<String> responses = new LinkedList<>();
for (int i = 0; i < batchSize + 1; i++) {
// one for each record, plus one for the batch
responses.add(clientHandler.getResponse());
}
channel.close();
workerGroup.shutdownGracefully();
assertThat(records, hasSize(batchSize));
final List<String> expectedAcks = new LinkedList<>();
for (int i = 0; i < records.size(); i++) {
// validate the output record value
assertThat(records.get(i).get("/text").getValueAsString(), equalTo(expectedRecords[i]));
// validate the record-level ack
expectedAcks.add(String.format("record_ack_%s", records.get(i).getHeader().getSourceId()));
}
// validate the batch-level ack
expectedAcks.add(String.format("batch_ack_%d", batchSize));
// because of the vagaries of TCP, we can't be sure that a single ack is returned in each discrete read
// this is due to the fact that the server can choose to flush the buffer in different ways, and the client
// can choose if/how to buffer on its side when reading from the channel
// therefore, we will simply combine all acks in the expected order into a single String and assert at that
// level, rather than at an individual read/expected ack level
final String combinedAcks = StringUtils.join(responses, "");
assertThat(combinedAcks, startsWith(StringUtils.join(expectedAcks, "")));
}
// Sends invalid JSON under each on-record-error policy (TO_ERROR, DISCARD,
// STOP_PIPELINE) and verifies record routing / pipeline failure accordingly.
@Test
public void errorHandling() throws StageException, IOException, ExecutionException, InterruptedException {
final Charset charset = Charsets.ISO_8859_1;
final TCPServerSourceConfig configBean = createConfigBean(charset);
configBean.dataFormat = DataFormat.JSON;
configBean.tcpMode = TCPMode.DELIMITED_RECORDS;
configBean.recordSeparatorStr = "\n";
configBean.ports = NetworkUtils.getRandomPorts(1);
final TCPServerSource source = new TCPServerSource(configBean);
final String outputLane = "lane";
final PushSourceRunner toErrorRunner = new PushSourceRunner.Builder(TCPServerDSource.class, source)
.addOutputLane(outputLane)
.setOnRecordError(OnRecordError.TO_ERROR)
.build();
final List<Record> records = new LinkedList<>();
final List<Record> errorRecords = new LinkedList<>();
// TO_ERROR: the bad record lands on the error stream
runAndCollectRecords(
toErrorRunner,
configBean,
records,
errorRecords,
1,
outputLane,
"{\"invalid_json\": yes}\n".getBytes(charset),
true,
false
);
assertThat(records, empty());
assertThat(errorRecords, hasSize(1));
assertThat(
errorRecords.get(0).getHeader().getErrorCode(),
equalTo(com.streamsets.pipeline.lib.parser.Errors.DATA_PARSER_04.getCode())
);
final PushSourceRunner discardRunner = new PushSourceRunner.Builder(TCPServerDSource.class, source)
.addOutputLane(outputLane)
.setOnRecordError(OnRecordError.DISCARD)
.build();
records.clear();
errorRecords.clear();
configBean.ports = NetworkUtils.getRandomPorts(1);
// DISCARD: the bad record is silently dropped
runAndCollectRecords(
discardRunner,
configBean,
records,
errorRecords,
1,
outputLane,
"{\"invalid_json\": yes}\n".getBytes(charset),
true,
false
);
assertThat(records, empty());
assertThat(errorRecords, empty());
configBean.ports = NetworkUtils.getRandomPorts(1);
final PushSourceRunner stopPipelineRunner = new PushSourceRunner.Builder(TCPServerDSource.class, source)
.addOutputLane(outputLane)
.setOnRecordError(OnRecordError.STOP_PIPELINE)
.build();
records.clear();
errorRecords.clear();
// STOP_PIPELINE: produce fails; unwrap to the underlying StageException
try {
runAndCollectRecords(
stopPipelineRunner,
configBean,
records,
errorRecords,
1,
outputLane,
"{\"invalid_json\": yes}\n".getBytes(charset),
true,
true
);
Assert.fail("ExecutionException should have been thrown");
} catch (ExecutionException e) {
assertThat(e.getCause(), instanceOf(RuntimeException.class));
final RuntimeException runtimeException = (RuntimeException) e.getCause();
assertThat(runtimeException.getCause(), instanceOf(StageException.class));
final StageException stageException = (StageException) runtimeException.getCause();
assertThat(stageException.getErrorCode().getCode(), equalTo(Errors.TCP_06.getCode()));
}
}
// Runs the source in Flume Avro IPC mode and appends a batch of Avro events
// via an Avro NettyTransceiver client, validating the emitted text records.
@Test
public void flumeAvroIpc() throws StageException, IOException, ExecutionException, InterruptedException {
final Charset charset = Charsets.UTF_8;
final TCPServerSourceConfig configBean = createConfigBean(charset);
configBean.tcpMode = TCPMode.FLUME_AVRO_IPC;
configBean.dataFormat = DataFormat.TEXT;
configBean.bindAddress = "0.0.0.0";
final int batchSize = 5;
final String outputLane = "output";
final TCPServerSource source = new TCPServerSource(configBean);
final PushSourceRunner runner = new PushSourceRunner.Builder(TCPServerDSource.class, source)
.addOutputLane(outputLane)
.setOnRecordError(OnRecordError.TO_ERROR)
.build();
runner.runInit();
runner.runProduce(Collections.emptyMap(), batchSize, out -> {
final Map<String, List<Record>> outputMap = out.getRecords();
assertThat(outputMap, hasKey(outputLane));
final List<Record> records = outputMap.get(outputLane);
assertThat(records, hasSize(batchSize));
for (int i = 0; i < batchSize; i++) {
assertThat(
records.get(i).get("/" + TextDataParserFactory.TEXT_FIELD_NAME),
fieldWithValue(getFlumeAvroIpcEventName(i))
);
}
runner.setStop();
});
final AvroSourceProtocol client = SpecificRequestor.getClient(AvroSourceProtocol.class, new NettyTransceiver(new InetSocketAddress("localhost", Integer.parseInt(configBean.ports.get(0)))));
List<AvroFlumeEvent> events = new LinkedList<>();
for (int i = 0; i < batchSize; i++) {
AvroFlumeEvent avroEvent = new AvroFlumeEvent();
avroEvent.setHeaders(new HashMap<CharSequence, CharSequence>());
avroEvent.setBody(ByteBuffer.wrap(getFlumeAvroIpcEventName(i).getBytes()));
events.add(avroEvent);
}
Status status = client.appendBatch(events);
assertThat(status, equalTo(Status.OK));
runner.waitOnProduce();
}
// Deterministic event body for the i-th Flume Avro IPC event.
private static String getFlumeAvroIpcEventName(int index) {
return "Avro event " + index;
}
/**
 * Shared driver: initializes the runner, starts a TCP client that writes
 * {@code data} (optionally in random slices), collects output and error
 * records, and tears everything down.
 */
private void runAndCollectRecords(
PushSourceRunner runner,
TCPServerSourceConfig configBean,
List<Record> records,
List<Record> errorRecords,
int batchSize,
String outputLane,
byte[] data,
boolean randomlySlice,
boolean runEmptyProduceAtEnd
) throws StageException, InterruptedException, ExecutionException {
runner.runInit();
EventLoopGroup workerGroup = new NioEventLoopGroup();
runner.runProduce(new HashMap<>(), batchSize, output -> {
records.addAll(output.getRecords().get(outputLane));
if (!runEmptyProduceAtEnd) {
runner.setStop();
}
});
ChannelFuture channelFuture = startTcpClient(
configBean,
workerGroup,
data,
randomlySlice
);
// Wait until the connection is closed.
channelFuture.channel().closeFuture().sync();
if (runner.getContext().getOnErrorRecord() != OnRecordError.STOP_PIPELINE) {
runner.setStop();
}
// wait for the push source runner produce to complete
runner.waitOnProduce();
errorRecords.addAll(runner.getErrorRecords());
// An extra empty produce lets STOP_PIPELINE scenarios drive the runner to a stop
if (runEmptyProduceAtEnd) {
runner.runProduce(new HashMap<>(), 0, output -> {
runner.setStop();
});
runner.waitOnProduce();
}
runner.runDestroy();
workerGroup.shutdownGracefully();
}
// Connects a Netty client to the source's first configured port and installs
// the test client handler that writes the payload and captures responses.
private ChannelFuture startTcpClient(
TCPServerSourceConfig configBean,
EventLoopGroup workerGroup,
byte[] data,
boolean randomlySlice
) throws
InterruptedException {
ChannelFuture channelFuture;
Bootstrap bootstrap = new Bootstrap();
bootstrap.group(workerGroup);
bootstrap.channel(NioSocketChannel.class);
bootstrap.remoteAddress(new InetSocketAddress("localhost", Integer.parseInt(configBean.ports.get(0))));
bootstrap.option(ChannelOption.SO_KEEPALIVE, true);
bootstrap.handler(new ChannelInitializer() {
@Override
protected void initChannel(Channel ch) throws Exception {
ch.pipeline().addLast(new TCPServerSourceClientHandler(randomlySlice, data));
}
});
// Start the client.
channelFuture = bootstrap.connect().sync();
return channelFuture;
}
/**
 * Test client: on connect, writes {@code data} (whole, or in random slices);
 * every inbound buffer is decoded as UTF-8 and queued for {@link #getResponse()}.
 */
private static class TCPServerSourceClientHandler extends ChannelInboundHandlerAdapter {
private final boolean randomlySlice;
private final byte[] data;
private final BlockingQueue<String> responses = new LinkedBlockingDeque<>();
private TCPServerSourceClientHandler(boolean randomlySlice, byte[] data) {
this.randomlySlice = randomlySlice;
this.data = data;
}
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
ByteBuf buf = (ByteBuf) msg;
responses.add(buf.toString(com.google.common.base.Charsets.UTF_8));
}
@Override
public void channelActive(ChannelHandlerContext ctx) throws Exception {
super.channelActive(ctx);
if (randomlySlice) {
// Write the payload in randomly-sized chunks to exercise server-side reassembly
for (List<Byte> slice : NetTestUtils.getRandomByteSlices(data)) {
ctx.writeAndFlush(Unpooled.copiedBuffer(Bytes.toArray(slice)));
}
} else {
ctx.writeAndFlush(Unpooled.copiedBuffer(data));
}
}
// Blocks until the next server response is available.
private String getResponse() throws InterruptedException {
return responses.take();
}
}
// Validates configs and asserts the issues match the expected error codes, in order.
private static void initSourceAndValidateIssues(TCPServerSourceConfig configBean, ErrorCode... errorCodes) throws
StageException {
List<Stage.ConfigIssue> issues = initSourceAndGetIssues(configBean);
assertThat(issues, hasSize(errorCodes.length));
for (int i = 0; i < errorCodes.length; i++) {
assertThat(issues.get(i).toString(), containsString(errorCodes[i].getCode()));
}
}
// Builds a runner for the given config and returns its validation issues.
private static List<Stage.ConfigIssue> initSourceAndGetIssues(TCPServerSourceConfig configBean) throws
StageException {
TCPServerSource source = new TCPServerSource(configBean);
PushSourceRunner runner = new PushSourceRunner.Builder(TCPServerDSource.class, source)
.addOutputLane("lane")
.setOnRecordError(OnRecordError.TO_ERROR)
.build();
return runner.runValidateConfigs();
}
// Baseline config: syslog mode, non-transparent framing, single random port, TLS off.
protected static TCPServerSourceConfig createConfigBean(Charset charset) {
TCPServerSourceConfig config = new TCPServerSourceConfig();
config.batchSize = 10;
config.tlsConfigBean.tlsEnabled = false;
config.numThreads = 1;
config.syslogCharset = charset.name();
config.tcpMode = TCPMode.SYSLOG;
config.syslogFramingMode= SyslogFramingMode.NON_TRANSPARENT_FRAMING;
config.nonTransparentFramingSeparatorCharStr = "\n";
config.maxMessageSize = 4096;
config.ports = randomSinglePort();
config.maxWaitTime = 1000;
config.recordProcessedAckMessage = "record processed";
config.batchCompletedAckMessage = "batch processed";
return config;
}
// Single randomly-chosen free port, as a one-element list.
private static List<String> randomSinglePort() {
return Arrays.asList(String.valueOf(NetworkUtils.getRandomPort()));
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.caffeine.cache;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import com.github.benmanes.caffeine.cache.Cache;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.caffeine.CaffeineConstants;
import org.apache.camel.component.mock.MockEndpoint;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
public class CaffeineCacheProducerTest extends CaffeineCacheTestSupport {
// ****************************
// Clear
// ****************************
// Verifies the cleanup action succeeds and produces no result body.
// NOTE(review): the section header says "Clear" but the action sent is
// ACTION_CLEANUP — confirm whether ACTION_INVALIDATE_ALL/clear was intended.
@Test
void testCacheClear() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMinimumMessageCount(1);
mock.expectedBodiesReceived((Object) null);
mock.expectedHeaderReceived(CaffeineConstants.ACTION_HAS_RESULT, false);
mock.expectedHeaderReceived(CaffeineConstants.ACTION_SUCCEEDED, true);
fluentTemplate().withHeader(CaffeineConstants.ACTION, CaffeineConstants.ACTION_CLEANUP).to("direct://start").send();
assertMockEndpointsSatisfied();
}
// ****************************
// Put
// ****************************
/**
 * Verifies a PUT action stores the value in the cache and that the exchange
 * carries no result body but a success header.
 */
@Test
void testCachePut() throws Exception {
    final String key = generateRandomString();
    final String val = generateRandomString();
    MockEndpoint mock = getMockEndpoint("mock:result");
    mock.expectedMinimumMessageCount(1);
    mock.expectedBodiesReceived(val);
    mock.expectedHeaderReceived(CaffeineConstants.ACTION_HAS_RESULT, false);
    mock.expectedHeaderReceived(CaffeineConstants.ACTION_SUCCEEDED, true);
    fluentTemplate().withHeader(CaffeineConstants.ACTION, CaffeineConstants.ACTION_PUT)
            .withHeader(CaffeineConstants.KEY, key).withBody(val).to("direct://start").send();
    // Fix: the mock expectations above were previously configured but never
    // verified, so any mismatch went undetected.
    assertMockEndpointsSatisfied();
    assertNotNull(getTestCache().getIfPresent(key));
    assertEquals(val, getTestCache().getIfPresent(key));
}
/**
 * Verifies PUT_ALL stores every map entry, checking a subset of keys against
 * the cache afterwards.
 */
@Test
void testCachePutAll() throws Exception {
    final Map<String, String> map = generateRandomMapOfString(3);
    final Set<String> keys = map.keySet().stream().limit(2).collect(Collectors.toSet());
    // Fix: configure the mock expectations BEFORE sending the exchange;
    // previously they were set after the message had already arrived, so the
    // received message was never checked against them.
    MockEndpoint mock1 = getMockEndpoint("mock:result");
    mock1.expectedMinimumMessageCount(1);
    mock1.expectedHeaderReceived(CaffeineConstants.ACTION_HAS_RESULT, false);
    mock1.expectedHeaderReceived(CaffeineConstants.ACTION_SUCCEEDED, true);
    fluentTemplate().withHeader(CaffeineConstants.ACTION, CaffeineConstants.ACTION_PUT_ALL).withBody(map)
            .to("direct://start").send();
    final Map<String, String> elements = getTestCache().getAllPresent(keys);
    keys.forEach(k -> {
        assertTrue(elements.containsKey(k));
        assertEquals(map.get(k), elements.get(k));
    });
    assertMockEndpointsSatisfied();
}
// ****************************
// Get
// ****************************
@Test
void testCacheGet() throws Exception {
final Cache<Object, Object> cache = getTestCache();
final String key = generateRandomString();
final String val = generateRandomString();
cache.put(key, val);
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMinimumMessageCount(1);
mock.expectedBodiesReceived(val);
mock.expectedHeaderReceived(CaffeineConstants.ACTION_HAS_RESULT, true);
mock.expectedHeaderReceived(CaffeineConstants.ACTION_SUCCEEDED, true);
fluentTemplate().withHeader(CaffeineConstants.ACTION, CaffeineConstants.ACTION_GET)
.withHeader(CaffeineConstants.KEY, key).withBody(val).to("direct://start").send();
assertMockEndpointsSatisfied();
}
    /**
     * GET_ALL with an explicit key set must return a map that contains the
     * cached value for every requested key.
     */
    @Test
    void testCacheGetAll() throws Exception {
        final Cache<Object, Object> cache = getTestCache();
        final Map<String, String> map = generateRandomMapOfString(3);
        // only a 2-element subset of the stored keys is requested/verified
        final Set<String> keys = map.keySet().stream().limit(2).collect(Collectors.toSet());
        cache.putAll(map);

        MockEndpoint mock = getMockEndpoint("mock:result");
        mock.expectedMinimumMessageCount(1);
        mock.expectedHeaderReceived(CaffeineConstants.ACTION_HAS_RESULT, true);
        mock.expectedHeaderReceived(CaffeineConstants.ACTION_SUCCEEDED, true);

        fluentTemplate().withHeader(CaffeineConstants.ACTION, CaffeineConstants.ACTION_GET_ALL)
                .withHeader(CaffeineConstants.KEYS, keys).to("direct://start").send();

        assertMockEndpointsSatisfied();

        // unchecked conversion: the exchange body is extracted as a raw Map
        final Map<String, String> elements = mock.getExchanges().get(0).getIn().getBody(Map.class);
        keys.forEach(k -> {
            assertTrue(elements.containsKey(k));
            assertEquals(map.get(k), elements.get(k));
        });
    }
//
// ****************************
// INVALIDATE
// ****************************
@Test
void testCacheInvalidate() throws Exception {
final Cache<Object, Object> cache = getTestCache();
final String key = generateRandomString();
final String val = generateRandomString();
cache.put(key, val);
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMinimumMessageCount(1);
mock.expectedHeaderReceived(CaffeineConstants.ACTION_HAS_RESULT, false);
mock.expectedHeaderReceived(CaffeineConstants.ACTION_SUCCEEDED, true);
fluentTemplate().withHeader(CaffeineConstants.ACTION, CaffeineConstants.ACTION_INVALIDATE)
.withHeader(CaffeineConstants.KEY, key).to("direct://start").send();
assertMockEndpointsSatisfied();
assertNull(cache.getIfPresent(key));
}
@Test
void testCacheInvalidateAll() throws Exception {
final Cache<Object, Object> cache = getTestCache();
final Map<String, String> map = generateRandomMapOfString(3);
final Set<String> keys = map.keySet().stream().limit(2).collect(Collectors.toSet());
cache.putAll(map);
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMinimumMessageCount(1);
mock.expectedHeaderReceived(CaffeineConstants.ACTION_HAS_RESULT, false);
mock.expectedHeaderReceived(CaffeineConstants.ACTION_SUCCEEDED, true);
fluentTemplate().withHeader(CaffeineConstants.ACTION, CaffeineConstants.ACTION_INVALIDATE_ALL)
.withHeader(CaffeineConstants.KEYS, keys).to("direct://start").send();
assertMockEndpointsSatisfied();
final Map<String, String> elements = getTestCache().getAllPresent(keys);
keys.forEach(k -> {
assertFalse(elements.containsKey(k));
});
}
    /**
     * NOTE(review): despite its name this test never queries cache statistics.
     * It is a near byte-for-byte duplicate of testCachePutAll() minus the
     * final assertMockEndpointsSatisfied() call, so the mock expectations
     * configured here are never verified. It should either exercise a real
     * stats action of the component (confirm against the CaffeineConstants
     * API) or be removed.
     */
    @Test
    void testStats() {
        final Map<String, String> map = generateRandomMapOfString(3);
        // only a 2-element subset of the stored keys is verified below
        final Set<String> keys = map.keySet().stream().limit(2).collect(Collectors.toSet());

        fluentTemplate().withHeader(CaffeineConstants.ACTION, CaffeineConstants.ACTION_PUT_ALL).withBody(map)
                .to("direct://start").send();

        // these expectations are configured but never asserted -- see note above
        MockEndpoint mock1 = getMockEndpoint("mock:result");
        mock1.expectedMinimumMessageCount(1);
        mock1.expectedHeaderReceived(CaffeineConstants.ACTION_HAS_RESULT, false);
        mock1.expectedHeaderReceived(CaffeineConstants.ACTION_SUCCEEDED, true);

        final Map<String, String> elements = getTestCache().getAllPresent(keys);
        keys.forEach(k -> {
            assertTrue(elements.containsKey(k));
            assertEquals(map.get(k), elements.get(k));
        });
    }
// ****************************
// Route
// ****************************
    /**
     * Route under test: direct://start feeds the caffeine-cache endpoint,
     * whose outcome is logged verbosely and finally captured by mock:result.
     */
    @Override
    protected RouteBuilder createRouteBuilder() {
        return new RouteBuilder() {
            public void configure() {
                from("direct://start")
                    .to("caffeine-cache://cache")
                    .to("log:org.apache.camel.component.caffeine?level=INFO&showAll=true&multiline=true")
                    .to("mock:result");
            }
        };
    }
}
| |
/* $Id$
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.etch.tests;
import org.apache.etch.bindings.java.util.StrIntHashMap;
import org.apache.etch.tests.Test1.E1;
import org.apache.etch.tests.Test1.S1;
import org.apache.etch.tests.Test1.S2;
import org.apache.etch.tests.Test1.S3;
import org.apache.etch.tests.Test1.S4;
import org.junit.Before;
import org.junit.Test;
/**
 * Simple tests to ensure that ImplTest1Client and ImplTest1Server are generated
 * correctly
*
* @author gsandhir
*/
/**
 * Smoke tests ensuring that the generated ImplTest1Client and ImplTest1Server
 * stubs are present and callable. Each generated stub method throws
 * UnsupportedOperationException until a real implementation is supplied, so
 * every call is wrapped in a try/catch that deliberately ignores exactly that
 * exception type: a test passes as long as nothing else goes wrong.
 *
 * Fixes vs. previous revision:
 *  - testM9 invoked m6() by copy/paste mistake; it now exercises m9().
 *  - deprecated {@code new Integer(...)} boxing replaced by shared constants.
 *
 * @author gsandhir
 */
public class TestImplTest1 {
    // shared boxed arguments used by the arithmetic-style tests
    private static final Integer FIVE = Integer.valueOf(5);
    private static final Integer THREE = Integer.valueOf(3);

    private ImplTest1Server testServer = null;
    private ImplTest1Client testClient = null;

    /** Creates fresh, unconnected client and server stubs before each test. */
    @Before public void setup() {
        // the remote peers are intentionally null: only the local stub
        // methods are invoked, never a live connection
        RemoteTest1Client tc = null;
        RemoteTest1Server ts = null;
        testServer = new ImplTest1Server(tc);
        testClient = new ImplTest1Client(ts);
    }

    /** */
    @Test public void testIncr() {
        try { testServer.incr(FIVE); testClient.incr(FIVE); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testNothing() {
        try { testServer.nothing(); testClient.nothing(); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testAdd() {
        try { testServer.add(FIVE, THREE); testClient.add(FIVE, THREE); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testSub() {
        try { testServer.sub(FIVE, THREE); testClient.sub(FIVE, THREE); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testSum() {
        int[] x = { 1, 2, 3 };
        try { testServer.sum(x); testClient.sum(x); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testFill() {
        try { testServer.fill(FIVE, THREE); testClient.fill(FIVE, THREE); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testFillObject() {
        try { testServer.fillObject(FIVE, (Object) null); testClient.fillObject(FIVE, (Object) null); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testTrans() {
        try { testServer.trans(E1.A, FIVE); testClient.trans(E1.A, FIVE); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testDist() {
        S1 a = new S1();
        S1 b = new S1();
        try { testServer.dist(a, b); testClient.dist(a, b); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** @throws Exception */
    @Test public void testBlow() throws Exception {
        try { testServer.blow("", FIVE); testClient.blow("", FIVE); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** @throws Exception */
    @Test public void testExcp5() throws Exception {
        try { testServer.throwExcp5("", FIVE, (Object) null); testClient.throwExcp5("", FIVE, (Object) null); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** @throws Exception */
    @Test public void testExcp6() throws Exception {
        try { testServer.throwExcp6("", FIVE, (Object[]) null); testClient.throwExcp6("", FIVE, (Object[]) null); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testBoolean() {
        try { testServer.p_boolean((Boolean) null); testClient.p_boolean((Boolean) null); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testBooleanArray() {
        try { testServer.p_boolean_array((boolean[]) null); testClient.p_boolean_array((boolean[]) null); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testByte() {
        try { testServer.p_byte((Byte) null); testClient.p_byte((Byte) null); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testByteArray() {
        try { testServer.p_byte_array((byte[]) null); testClient.p_byte_array((byte[]) null); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testShort() {
        try { testServer.p_short((Short) null); testClient.p_short((Short) null); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testShortArray() {
        try { testServer.p_short_array((short[]) null); testClient.p_short_array((short[]) null); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testInt() {
        try { testServer.p_int((Integer) null); testClient.p_int((Integer) null); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testIntArray() {
        try { testServer.p_int_array((int[]) null); testClient.p_int_array((int[]) null); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testLong() {
        try { testServer.p_long((Long) null); testClient.p_long((Long) null); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testLongArray() {
        try { testServer.p_long_array((long[]) null); testClient.p_long_array((long[]) null); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testFloat() {
        try { testServer.p_float((Float) null); testClient.p_float((Float) null); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testFloatArray() {
        try { testServer.p_float_array((float[]) null); testClient.p_float_array((float[]) null); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testDouble() {
        try { testServer.p_double((Double) null); testClient.p_double((Double) null); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testDoubleArray() {
        try { testServer.p_double_array((double[]) null); testClient.p_double_array((double[]) null); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testString() {
        try { testServer.p_string((String) null); testClient.p_string((String) null); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testStringArray() {
        try { testServer.p_string_array((String[]) null); testClient.p_string_array((String[]) null); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testE1() {
        try { testServer.p_E1((E1) null); testClient.p_E1((E1) null); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testE1Array() {
        try { testServer.p_E1_array((E1[]) null); testClient.p_E1_array((E1[]) null); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testS1() {
        try { testServer.p_S1((S1) null); testClient.p_S1((S1) null); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testS1Array() {
        try { testServer.p_S1_array((S1[]) null); testClient.p_S1_array((S1[]) null); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testS2() {
        try { testServer.p_S2((S2) null); testClient.p_S2((S2) null); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testS2Array() {
        try { testServer.p_S2_array((S2[]) null); testClient.p_S2_array((S2[]) null); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testBlob() {
        try { testServer.p_Blob((StrIntHashMap) null); testClient.p_Blob((StrIntHashMap) null); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testBlobArray() {
        try { testServer.p_Blob_array((StrIntHashMap[]) null); testClient.p_Blob_array((StrIntHashMap[]) null); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testObject() {
        try { testServer.p_object((Object) null); testClient.p_object((Object) null); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testObjectArray() {
        try { testServer.p_object_array((Object[]) null); testClient.p_object_array((Object[]) null); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testObjectStruct() {
        try { testServer.p_object_struct((S3) null); testClient.p_object_struct((S3) null); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testObjectStructArray() {
        try { testServer.p_object_struct_array((S4) null); testClient.p_object_struct_array((S4) null); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testAlwaysWork() {
        try { testServer.alwaysWorks(); testClient.alwaysWorks(); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testAlwaysFail() {
        try { testServer.alwaysFails(); testClient.alwaysFails(); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testIsTrue() {
        try { testServer.isTrue(); testClient.isTrue(); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testIsFalse() {
        try { testServer.isFalse(); testClient.isFalse(); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testM1() {
        try { testServer.m1(); testClient.m1(); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testCanM1() {
        try { testServer.can_m1(); testClient.can_m1(); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testM2() {
        try { testServer.m2(); testClient.m2(); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testCanM2() {
        try { testServer.can_m2((Boolean) null); testClient.can_m2((Boolean) null); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testM3() {
        try { testServer.m3(); testClient.m3(); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testCanM3() {
        try { testServer.can_m3((Boolean) null); testClient.can_m3((Boolean) null); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testM4() {
        try { testServer.m4(); testClient.m4(); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testCanM4() {
        try { testServer.can_m4((Boolean) null); testClient.can_m4((Boolean) null); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testM5() {
        try { testServer.m5(); testClient.m5(); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testCanM5() {
        try { testServer.can_m5((Byte) null); testClient.can_m5((Byte) null); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testM6() {
        try { testServer.m6(); testClient.m6(); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testCanM6() {
        try { testServer.can_m6((Short) null); testClient.can_m6((Short) null); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testM7() {
        try { testServer.m7(); testClient.m7(); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testCanM7() {
        try { testServer.can_m7((Integer) null); testClient.can_m7((Integer) null); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testM8() {
        try { testServer.m8(); testClient.m8(); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testCanM8() {
        try { testServer.can_m8((Long) null); testClient.can_m8((Long) null); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** Fix: previously invoked m6() by copy/paste mistake; now exercises m9(). */
    @Test public void testM9() {
        try { testServer.m9(); testClient.m9(); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testCanM9() {
        try { testServer.can_m9((Float) null); testClient.can_m9((Float) null); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testM10() {
        try { testServer.m10(); testClient.m10(); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testCanM10() {
        try { testServer.can_m10((Double) null); testClient.can_m10((Double) null); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testM11() {
        try { testServer.m11(); testClient.m11(); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testCanM11() {
        try { testServer.can_m11((String) null); testClient.can_m11((String) null); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testM12() {
        try { testServer.m12(); testClient.m12(); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testCanM12() {
        try { testServer.can_m12((Integer) null); testClient.can_m12((Integer) null); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testM13() {
        try { testServer.m13((Integer) null); testClient.m13((Integer) null); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testCanM13() {
        try { testServer.can_m13((Integer) null); testClient.can_m13((Integer) null); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testM14() {
        try { testServer.m14((S1) null); testClient.m14((S1) null); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testCanM14() {
        try { testServer.can_m14((Integer) null); testClient.can_m14((Integer) null); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testM15() {
        try { testServer.m15(); testClient.m15(); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }

    /** */
    @Test public void testCanM15() {
        try { testServer.can_m15((E1) null); testClient.can_m15((E1) null); }
        catch (UnsupportedOperationException e) { /* expected: stub */ }
    }
}
| |
/*******************************************************************************
* Copyright 2011 See AUTHORS file.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
/*
* Copyright 2010 Mario Zechner (contact@badlogicgames.com), Nathan Sweet (admin@esotericsoftware.com)
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS"
* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package com.badlogic.gdx.tests.box2d;
import com.badlogic.gdx.ApplicationListener;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.InputProcessor;
import com.badlogic.gdx.graphics.GL10;
import com.badlogic.gdx.graphics.OrthographicCamera;
import com.badlogic.gdx.graphics.g2d.BitmapFont;
import com.badlogic.gdx.graphics.g2d.SpriteBatch;
import com.badlogic.gdx.math.Vector2;
import com.badlogic.gdx.math.Vector3;
import com.badlogic.gdx.physics.box2d.Body;
import com.badlogic.gdx.physics.box2d.BodyDef;
import com.badlogic.gdx.physics.box2d.BodyDef.BodyType;
import com.badlogic.gdx.physics.box2d.Box2DDebugRenderer;
import com.badlogic.gdx.physics.box2d.Fixture;
import com.badlogic.gdx.physics.box2d.QueryCallback;
import com.badlogic.gdx.physics.box2d.World;
import com.badlogic.gdx.physics.box2d.joints.MouseJoint;
import com.badlogic.gdx.physics.box2d.joints.MouseJointDef;
import com.badlogic.gdx.utils.TimeUtils;
/** Base class for all Box2D Testbed tests, all subclasses must implement the createWorld() method.
*
* @author badlogicgames@gmail.com */
public abstract class Box2DTest implements ApplicationListener, InputProcessor {
	/** the camera (viewport is in meters, not pixels) **/
	protected OrthographicCamera camera;
	/** the renderer that draws body/fixture outlines for debugging **/
	protected Box2DDebugRenderer renderer;
	// used only for the fps/timing overlay drawn in render()
	SpriteBatch batch;
	BitmapFont font;
	/** our box2D world **/
	protected World world;
	/** ground body to connect the mouse joint to **/
	protected Body groundBody;
	/** our mouse joint; non-null only while a body is being dragged **/
	protected MouseJoint mouseJoint = null;
	/** a hit body, set by the AABB query in touchDown() **/
	protected Body hitBody = null;
	/** Subclasses populate the given world with the bodies/joints of the concrete test. */
	protected abstract void createWorld (World world);
	/** temp vector **/
	protected Vector2 tmp = new Vector2();
	@Override
	public void render () {
		// step the physics world using the frame's delta time.
		// NOTE(review): this is a VARIABLE time step (the previous comment
		// claimed a fixed one); a fixed-step accumulator would be more stable.
		long startTime = TimeUtils.nanoTime();
		world.step(Gdx.app.getGraphics().getDeltaTime(), 3, 3);
		float updateTime = (TimeUtils.nanoTime() - startTime) / 1000000000.0f;
		startTime = TimeUtils.nanoTime();
		// clear the screen and setup the projection matrix
		Gdx.gl.glClear(GL10.GL_COLOR_BUFFER_BIT);
		camera.update();
		// render the world using the debug renderer
		renderer.render(world, camera.combined);
		float renderTime = (TimeUtils.nanoTime() - startTime) / 1000000000.0f;
		// overlay fps plus the update/render timings measured above (seconds)
		batch.begin();
		font.draw(batch, "fps:" + Gdx.graphics.getFramesPerSecond() + ", update: " + updateTime + ", render: " + renderTime, 0, 20);
		batch.end();
	}
	@Override
	public void create () {
		// setup the camera. In Box2D we operate on a
		// meter scale, pixels won't do it. So we use
		// an orthographic camera with a viewport of
		// 48 meters in width and 32 meters in height.
		// We also position the camera so that it
		// looks at (0,16) (that's where the middle of the
		// screen will be located).
		camera = new OrthographicCamera(48, 32);
		camera.position.set(0, 15, 0);
		// create the debug renderer
		renderer = new Box2DDebugRenderer();
		// create the world with standard earth-like gravity, sleeping enabled
		world = new World(new Vector2(0, -10), true);
		// we also need an invisible zero size ground body
		// to which we can connect the mouse joint
		BodyDef bodyDef = new BodyDef();
		groundBody = world.createBody(bodyDef);
		// call abstract method to populate the world
		createWorld(world);
		batch = new SpriteBatch();
		font = new BitmapFont(Gdx.files.internal("data/arial-15.fnt"), false);
	}
	@Override
	public void dispose () {
		// release native Box2D/renderer resources and drop all references
		renderer.dispose();
		world.dispose();
		renderer = null;
		world = null;
		mouseJoint = null;
		hitBody = null;
	}
	@Override
	public boolean keyDown (int keycode) {
		return false;
	}
	@Override
	public boolean keyTyped (char character) {
		return false;
	}
	@Override
	public boolean keyUp (int keycode) {
		return false;
	}
	/** we instantiate this vector and the callback here so we don't irritate the GC **/
	Vector3 testPoint = new Vector3();
	QueryCallback callback = new QueryCallback() {
		@Override
		public boolean reportFixture (Fixture fixture) {
			// if the hit point is inside the fixture of the body
			// we report it; returning false stops the AABB query early
			if (fixture.testPoint(testPoint.x, testPoint.y)) {
				hitBody = fixture.getBody();
				return false;
			} else
				return true;
		}
	};
	@Override
	public boolean touchDown (int x, int y, int pointer, int button) {
		// translate the mouse coordinates to world coordinates
		camera.unproject(testPoint.set(x, y, 0));
		// ask the world which bodies are within the given
		// bounding box around the mouse pointer
		hitBody = null;
		world.QueryAABB(callback, testPoint.x - 0.0001f, testPoint.y - 0.0001f, testPoint.x + 0.0001f, testPoint.y + 0.0001f);
		// the invisible ground body is never a drag target
		if (hitBody == groundBody) hitBody = null;
		// ignore kinematic bodies, they don't work with the mouse joint
		if (hitBody != null && hitBody.getType() == BodyType.KinematicBody) return false;
		// if we hit something we create a new mouse joint
		// and attach it to the hit body.
		if (hitBody != null) {
			MouseJointDef def = new MouseJointDef();
			def.bodyA = groundBody;
			def.bodyB = hitBody;
			def.collideConnected = true;
			def.target.set(testPoint.x, testPoint.y);
			def.maxForce = 1000.0f * hitBody.getMass();
			mouseJoint = (MouseJoint)world.createJoint(def);
			// wake the body so it responds to the joint immediately
			hitBody.setAwake(true);
		}
		return false;
	}
	/** another temporary vector **/
	Vector2 target = new Vector2();
	@Override
	public boolean touchDragged (int x, int y, int pointer) {
		// if a mouse joint exists we simply update
		// the target of the joint based on the new
		// mouse coordinates
		if (mouseJoint != null) {
			camera.unproject(testPoint.set(x, y, 0));
			mouseJoint.setTarget(target.set(testPoint.x, testPoint.y));
		}
		return false;
	}
	@Override
	public boolean touchUp (int x, int y, int pointer, int button) {
		// if a mouse joint exists we simply destroy it
		if (mouseJoint != null) {
			world.destroyJoint(mouseJoint);
			mouseJoint = null;
		}
		return false;
	}
	@Override
	public boolean mouseMoved (int x, int y) {
		return false;
	}
	@Override
	public boolean scrolled (int amount) {
		return false;
	}
	public void pause () {
	}
	public void resume () {
	}
	public void resize (int width, int height) {
	}
}
| |
/*************************************************************************
* Compilation: javac StdArrayIO.java
* Execution: java StdArrayIO < input.txt
*
* A library for reading in 1D and 2D arrays of integers, doubles,
* and booleans from standard input and printing them out to
* standard output.
*
* % more tinyDouble1D.txt
* 4
* .000 .246 .222 -.032
*
* % more tinyDouble2D.txt
* 4 3
* .000 .270 .000
* .246 .224 -.036
* .222 .176 .0893
* -.032 .739 .270
*
* % more tinyBoolean2D.txt
* 4 3
* 1 1 0
* 0 0 0
* 0 1 1
* 1 1 1
*
* % cat tinyDouble1D.txt tinyDouble2D.txt tinyBoolean2D.txt | java StdArrayIO
* 4
* 0.00000 0.24600 0.22200 -0.03200
*
* 4 3
* 0.00000 0.27000 0.00000
* 0.24600 0.22400 -0.03600
* 0.22200 0.17600 0.08930
 *  -0.03200  0.73900  0.27000
*
* 4 3
* 1 1 0
* 0 0 0
* 0 1 1
* 1 1 1
*
*************************************************************************/
/**
* <i>Standard array IO</i>. This class provides methods for reading
* in 1D and 2D arrays from standard input and printing out to
* standard output.
* <p>
* For additional documentation, see
 * <a href="http://introcs.cs.princeton.edu/22library">Section 2.2</a> of
* <i>Introduction to Programming in Java: An Interdisciplinary Approach</i>
* by Robert Sedgewick and Kevin Wayne.
*
* @author Robert Sedgewick
* @author Kevin Wayne
*/
public class StdArrayIO {

    // it doesn't make sense to instantiate this library class
    private StdArrayIO() { }

    /**
     * Reads a length N, then N doubles, from standard input.
     *
     * @return the 1D array that was read
     */
    public static double[] readDouble1D() {
        int N = StdIn.readInt();
        double[] a = new double[N];
        for (int i = 0; i < N; i++) {
            a[i] = StdIn.readDouble();
        }
        return a;
    }

    /**
     * Prints the array length, then the entries (format "%9.5f"), to standard output.
     *
     * @param a the array to print
     */
    public static void print(double[] a) {
        int N = a.length;
        StdOut.println(N);
        for (int i = 0; i < N; i++) {
            StdOut.printf("%9.5f ", a[i]);
        }
        StdOut.println();
    }

    /**
     * Reads dimensions M and N, then M*N doubles in row-major order, from standard input.
     *
     * @return the M-by-N array that was read
     */
    public static double[][] readDouble2D() {
        int M = StdIn.readInt();
        int N = StdIn.readInt();
        double[][] a = new double[M][N];
        for (int i = 0; i < M; i++) {
            for (int j = 0; j < N; j++) {
                a[i][j] = StdIn.readDouble();
            }
        }
        return a;
    }

    /**
     * Prints the dimensions, then the entries row by row, to standard output.
     * Assumes the matrix is rectangular (every row as long as row 0).
     *
     * @param a the matrix to print
     */
    public static void print(double[][] a) {
        int M = a.length;
        // guard: an empty matrix has no row 0 to take the column count from
        int N = (M > 0) ? a[0].length : 0;
        StdOut.println(M + " " + N);
        for (int i = 0; i < M; i++) {
            for (int j = 0; j < N; j++) {
                StdOut.printf("%9.5f ", a[i][j]);
            }
            StdOut.println();
        }
    }

    /**
     * Reads a length N, then N ints, from standard input.
     *
     * @return the 1D array that was read
     */
    public static int[] readInt1D() {
        int N = StdIn.readInt();
        int[] a = new int[N];
        for (int i = 0; i < N; i++) {
            a[i] = StdIn.readInt();
        }
        return a;
    }

    /**
     * Prints the array length, then the entries (format "%9d"), to standard output.
     *
     * @param a the array to print
     */
    public static void print(int[] a) {
        int N = a.length;
        StdOut.println(N);
        for (int i = 0; i < N; i++) {
            StdOut.printf("%9d ", a[i]);
        }
        StdOut.println();
    }

    /**
     * Reads dimensions M and N, then M*N ints in row-major order, from standard input.
     *
     * @return the M-by-N array that was read
     */
    public static int[][] readInt2D() {
        int M = StdIn.readInt();
        int N = StdIn.readInt();
        int[][] a = new int[M][N];
        for (int i = 0; i < M; i++) {
            for (int j = 0; j < N; j++) {
                a[i][j] = StdIn.readInt();
            }
        }
        return a;
    }

    /**
     * Prints the dimensions, then the entries row by row, to standard output.
     * Assumes the matrix is rectangular (every row as long as row 0).
     *
     * @param a the matrix to print
     */
    public static void print(int[][] a) {
        int M = a.length;
        // guard: an empty matrix has no row 0 to take the column count from
        int N = (M > 0) ? a[0].length : 0;
        StdOut.println(M + " " + N);
        for (int i = 0; i < M; i++) {
            for (int j = 0; j < N; j++) {
                StdOut.printf("%9d ", a[i][j]);
            }
            StdOut.println();
        }
    }

    /**
     * Reads a length N, then N booleans, from standard input.
     *
     * @return the 1D array that was read
     */
    public static boolean[] readBoolean1D() {
        int N = StdIn.readInt();
        boolean[] a = new boolean[N];
        for (int i = 0; i < N; i++) {
            a[i] = StdIn.readBoolean();
        }
        return a;
    }

    /**
     * Prints the array length, then the entries as 1 (true) or 0 (false),
     * to standard output.
     *
     * @param a the array to print
     */
    public static void print(boolean[] a) {
        int N = a.length;
        StdOut.println(N);
        for (int i = 0; i < N; i++) {
            if (a[i]) StdOut.print("1 ");
            else StdOut.print("0 ");
        }
        StdOut.println();
    }

    /**
     * Reads dimensions M and N, then M*N booleans in row-major order, from standard input.
     *
     * @return the M-by-N array that was read
     */
    public static boolean[][] readBoolean2D() {
        int M = StdIn.readInt();
        int N = StdIn.readInt();
        boolean[][] a = new boolean[M][N];
        for (int i = 0; i < M; i++) {
            for (int j = 0; j < N; j++) {
                a[i][j] = StdIn.readBoolean();
            }
        }
        return a;
    }

    /**
     * Prints the dimensions, then the entries as 1/0 row by row, to standard output.
     * Assumes the matrix is rectangular (every row as long as row 0).
     *
     * @param a the matrix to print
     */
    public static void print(boolean[][] a) {
        int M = a.length;
        // guard: an empty matrix has no row 0 to take the column count from
        int N = (M > 0) ? a[0].length : 0;
        StdOut.println(M + " " + N);
        for (int i = 0; i < M; i++) {
            for (int j = 0; j < N; j++) {
                if (a[i][j]) StdOut.print("1 ");
                else StdOut.print("0 ");
            }
            StdOut.println();
        }
    }

    /**
     * Test client: echoes a 1D double array, a 2D double matrix, and a 2D
     * boolean matrix read from standard input.
     */
    public static void main(String[] args) {
        // read and print an array of doubles
        double[] a = StdArrayIO.readDouble1D();
        StdArrayIO.print(a);
        StdOut.println();

        // read and print a matrix of doubles
        double[][] b = StdArrayIO.readDouble2D();
        StdArrayIO.print(b);
        StdOut.println();

        // read and print a matrix of booleans
        boolean[][] d = StdArrayIO.readBoolean2D();
        StdArrayIO.print(d);
        StdOut.println();
    }
}
| |
package org.openur.module.service.security;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.util.Arrays;
import java.util.HashSet;
import java.util.UUID;
import javax.inject.Inject;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.openur.domain.testfixture.dummyimpl.MyApplicationImpl;
import org.openur.domain.testfixture.dummyimpl.MyOrgUnitMember;
import org.openur.domain.testfixture.dummyimpl.MyOrgUnit;
import org.openur.domain.testfixture.dummyimpl.MyAuthorizableTechUser;
import org.openur.domain.testfixture.dummyimpl.MyPermissionImpl;
import org.openur.domain.testfixture.dummyimpl.MyPerson;
import org.openur.domain.testfixture.dummyimpl.MyRoleImpl;
import org.openur.domain.testfixture.testobjects.TestObjectContainer;
import org.openur.module.domain.userstructure.orgunit.IOrganizationalUnit;
import org.openur.module.persistence.dao.ISecurityDomainDao;
import org.openur.module.service.config.SecurityTestSpringConfig;
import org.openur.module.service.userstructure.IOrgUnitServices;
import org.openur.module.service.userstructure.IUserServices;
import org.openur.module.util.exception.EntityNotFoundException;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
/**
 * Spring-driven tests for {@code IAuthorizationServices}: permission checks
 * for persons (direct org-unit membership, permissions inherited via the
 * super/root org-unit chain, system-wide permissions) and for technical users.
 * Collaborating services and the security DAO are Mockito mocks provided by
 * {@code SecurityTestSpringConfig}; {@link #setUp()} stubs them both with the
 * shared fixtures from {@code TestObjectContainer} and with a locally built
 * hierarchy: rootOu &lt;- superOu &lt;- ou, plus an unrelated, member-less otherOu.
 */
@ActiveProfiles("testSecurityServices")
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(classes = {SecurityTestSpringConfig.class})
public class AuthorizationServicesTest
{
// service under test (real implementation, wired by Spring):
@Inject
private IAuthorizationServices authorizationServices;
// collaborators injected as Mockito mocks, stubbed in setUp():
@Inject
private IUserServices userServicesMock;
@Inject
private IOrgUnitServices orgUnitServicesMock;
@Inject
private ISecurityDomainDao securityDaoMock;
// fields for arbitrary domain-objects:
private static final String APP_ID = UUID.randomUUID().toString();
private static final String APP_NAME = "appName";
private static final String PERM_ID = UUID.randomUUID().toString();
private static final String PERMISSION_TEXT = "permissionText";
private static final String PERMISSION_IN_SUPER_OU_TEXT = "permissionInSuperOuText";
private static final String PERMISSION_IN_ROOT_OU_TEXT = "permissionInRootOuText";
private static final String OTHER_PERMISSION_TEXT = "otherPermissionText";
private static final String PERSON_ID = UUID.randomUUID().toString();
private static final String PERSON_IN_ROOT_ID = UUID.randomUUID().toString();
private static final String OU_ID = UUID.randomUUID().toString();
private static final String OTHER_OU_ID = UUID.randomUUID().toString();
private static final String SUPER_OU_ID = UUID.randomUUID().toString();
private static final String ROOT_OU_ID = UUID.randomUUID().toString();
private static final String TECH_USER_ID = UUID.randomUUID().toString();
// arbitrary domain objects assembled in setUp():
private MyApplicationImpl app;
private MyPermissionImpl permission;
private MyPermissionImpl permissionInSuperOu;
private MyPermissionImpl permissionInRootOu;
private MyPermissionImpl otherPermission;
private MyRoleImpl role;
private MyRoleImpl roleInSuperOu;
private MyRoleImpl roleInRootOu;
private MyPerson person;
private MyPerson personInRoot;
private MyOrgUnitMember member;
private MyOrgUnitMember memberInSuperOu;
private MyOrgUnitMember memberInRootOu;
private MyOrgUnit ou;
private MyOrgUnit otherOu;
private MyOrgUnit superOu;
private MyOrgUnit rootOu;
private MyAuthorizableTechUser techUser;
/**
 * Stubs the mocks with the shared {@code TestObjectContainer} fixtures and
 * builds the local org-unit hierarchy used by the "arbitrary" assertions.
 */
@Before
public void setUp()
{
// stubbing for the standard open-ur fixtures:
Mockito.when(userServicesMock.findPersonById(TestObjectContainer.PERSON_UUID_1)).thenReturn(TestObjectContainer.PERSON_1);
Mockito.when(userServicesMock.findPersonById(TestObjectContainer.PERSON_UUID_3)).thenReturn(TestObjectContainer.PERSON_3);
Mockito.when(userServicesMock.findTechnicalUserById(TestObjectContainer.TECH_USER_UUID_1)).thenReturn(TestObjectContainer.TECH_USER_1);
Mockito.when(orgUnitServicesMock.findOrgUnitById(TestObjectContainer.ORG_UNIT_UUID_A, Boolean.TRUE)).thenReturn(TestObjectContainer.ORG_UNIT_A);
Mockito.when(orgUnitServicesMock.findOrgUnitById(TestObjectContainer.ORG_UNIT_UUID_C, Boolean.TRUE)).thenReturn(TestObjectContainer.ORG_UNIT_C);
Mockito.when(orgUnitServicesMock.findOrgUnitById(TestObjectContainer.SUPER_OU_UUID_1, Boolean.TRUE)).thenReturn(TestObjectContainer.SUPER_OU_1);
Mockito.when(securityDaoMock.findPermission(TestObjectContainer.PERMISSION_1_A.getPermissionText(), TestObjectContainer.APP_A.getApplicationName()))
.thenReturn(TestObjectContainer.PERMISSION_1_A);
Mockito.when(securityDaoMock.findPermission(TestObjectContainer.PERMISSION_2_A.getPermissionText(), TestObjectContainer.APP_A.getApplicationName()))
.thenReturn(TestObjectContainer.PERMISSION_2_A);
Mockito.when(securityDaoMock.findPermission(TestObjectContainer.PERMISSION_1_C.getPermissionText(), TestObjectContainer.APP_C.getApplicationName()))
.thenReturn(TestObjectContainer.PERMISSION_1_C);
Mockito.when(securityDaoMock.findPermission(TestObjectContainer.PERMISSION_2_C.getPermissionText(), TestObjectContainer.APP_C.getApplicationName()))
.thenReturn(TestObjectContainer.PERMISSION_2_C);
// init arbitrary domain-objects:
app = new MyApplicationImpl(APP_ID, APP_NAME);
// root level: personInRoot has a role/permission granted in the root org-unit
personInRoot = new MyPerson(PERSON_IN_ROOT_ID, "personInRootNumber");
personInRoot.addApplication(app);
permissionInRootOu = new MyPermissionImpl(UUID.randomUUID().toString(), PERMISSION_IN_ROOT_OU_TEXT, app);
roleInRootOu = new MyRoleImpl(UUID.randomUUID().toString(), "roleInRootOuName");
roleInRootOu.addPermissionSet(app, new HashSet<MyPermissionImpl>(Arrays.asList(permissionInRootOu)));
memberInRootOu = new MyOrgUnitMember(personInRoot, ROOT_OU_ID);
memberInRootOu.addRole(roleInRootOu);
rootOu = new MyOrgUnit(ROOT_OU_ID, "rootOuNumber");
rootOu.addMember(memberInRootOu);
// super-ou level: person gets a role/permission via membership in superOu
person = new MyPerson(PERSON_ID, "personNumber");
person.addApplication(app);
permissionInSuperOu = new MyPermissionImpl(UUID.randomUUID().toString(), PERMISSION_IN_SUPER_OU_TEXT, app);
roleInSuperOu = new MyRoleImpl(UUID.randomUUID().toString(), "roleInSuperOuName");
roleInSuperOu.addPermissionSet(app, new HashSet<MyPermissionImpl>(Arrays.asList(permissionInSuperOu)));
memberInSuperOu = new MyOrgUnitMember(person, SUPER_OU_ID);
memberInSuperOu.addRole(roleInSuperOu);
superOu = new MyOrgUnit(SUPER_OU_ID, "superOuNumber");
superOu.setSuperOrgUnit(rootOu);
superOu.setRootOrgUnit(rootOu);
superOu.addMember(memberInSuperOu);
// ou level: the same person also holds a role/permission directly in ou
permission = new MyPermissionImpl(PERM_ID, PERMISSION_TEXT, app);
role = new MyRoleImpl(UUID.randomUUID().toString(), "roleName");
role.addPermissionSet(app, new HashSet<MyPermissionImpl>(Arrays.asList(permission)));
member = new MyOrgUnitMember(person, OU_ID);
member.addRole(role);
ou = new MyOrgUnit(OU_ID, "ouNumber");
ou.setSuperOrgUnit(superOu);
ou.setRootOrgUnit(rootOu);
ou.addMember(member);
// otherOu has no members; otherPermission is granted to no person
otherPermission = new MyPermissionImpl(UUID.randomUUID().toString(), OTHER_PERMISSION_TEXT, app);
otherOu = new MyOrgUnit(OTHER_OU_ID, "otherOuNumber");
otherOu.setSuperOrgUnit(superOu);
// technical user holds 'permission' and 'otherPermission' directly
techUser = new MyAuthorizableTechUser(TECH_USER_ID, "techUserNumber");
techUser.addPermissionSet(app, new HashSet<MyPermissionImpl>(Arrays.asList(permission, otherPermission)));
// stubbing for the arbitrary fixtures built above:
Mockito.when(securityDaoMock.findPermission(PERMISSION_TEXT, APP_NAME)).thenReturn(permission);
Mockito.when(securityDaoMock.findPermission(PERMISSION_IN_SUPER_OU_TEXT, APP_NAME)).thenReturn(permissionInSuperOu);
Mockito.when(securityDaoMock.findPermission(OTHER_PERMISSION_TEXT, APP_NAME)).thenReturn(otherPermission);
Mockito.when(securityDaoMock.findPermission(PERMISSION_IN_ROOT_OU_TEXT, APP_NAME)).thenReturn(permissionInRootOu);
Mockito.when(userServicesMock.findPersonById(PERSON_ID)).thenReturn(person);
Mockito.when(userServicesMock.findPersonById(PERSON_IN_ROOT_ID)).thenReturn(personInRoot);
Mockito.when(orgUnitServicesMock.findOrgUnitById(SUPER_OU_ID, Boolean.TRUE)).thenReturn(superOu);
Mockito.when(orgUnitServicesMock.findOrgUnitById(OU_ID, Boolean.TRUE)).thenReturn(ou);
Mockito.when(orgUnitServicesMock.findOrgUnitById(OTHER_OU_ID, Boolean.TRUE)).thenReturn(otherOu);
Mockito.when(orgUnitServicesMock.obtainRootOrgUnits()).thenReturn(new HashSet<IOrganizationalUnit>(Arrays.asList(rootOu)));
Mockito.when(userServicesMock.findTechnicalUserById(TECH_USER_ID)).thenReturn(techUser);
}
/** Person is a member of the org-unit and one of its roles grants the permission. */
@Test
public void testHasPermissionInOrgUnit()
throws EntityNotFoundException
{
// test with standard open-ur domain-objects:
assertTrue(authorizationServices.hasPermission(
TestObjectContainer.PERSON_UUID_1, TestObjectContainer.ORG_UNIT_UUID_A,
TestObjectContainer.PERMISSION_1_A.getPermissionText(), TestObjectContainer.APP_A.getApplicationName()));
// test with arbitrary domain-objects:
assertTrue(authorizationServices.hasPermission(PERSON_ID, OU_ID, PERMISSION_TEXT, app.getApplicationName()));
}
/** Person is a member of the org-unit but none of its roles grants the permission. */
@Test
public void testMemberInOrgUnitButHasNotPermission()
throws EntityNotFoundException
{
// test with standard open-ur domain-objects:
assertFalse(authorizationServices.hasPermission(
TestObjectContainer.PERSON_UUID_1, TestObjectContainer.ORG_UNIT_UUID_A,
TestObjectContainer.PERMISSION_1_C.getPermissionText(), TestObjectContainer.APP_C.getApplicationName()));
// test with arbitrary domain-objects:
assertFalse(authorizationServices.hasPermission(PERSON_ID, OU_ID, OTHER_PERMISSION_TEXT, APP_NAME));
}
/** Person is not a member of the org-unit at all, so the permission is denied. */
@Test
public void testNotMemberInOrgUnitAndThusHasNotPermission()
throws EntityNotFoundException
{
// test with standard open-ur domain-objects:
assertFalse(authorizationServices.hasPermission(
TestObjectContainer.PERSON_UUID_1, TestObjectContainer.ORG_UNIT_UUID_C,
TestObjectContainer.PERMISSION_1_C.getPermissionText(), TestObjectContainer.APP_C.getApplicationName()));
// test with arbitrary domain-objects:
assertFalse(authorizationServices.hasPermission(PERSON_ID, OTHER_OU_ID, OTHER_PERMISSION_TEXT, APP_NAME));
}
/** Permission is inherited from membership in the super org-unit of the queried one. */
@Test
public void testHasPermissionInSuperOrgUnit()
throws EntityNotFoundException
{
// test with standard open-ur domain-objects:
assertTrue(authorizationServices.hasPermission(
TestObjectContainer.PERSON_UUID_3, TestObjectContainer.ORG_UNIT_UUID_A,
TestObjectContainer.PERMISSION_2_C.getPermissionText(), TestObjectContainer.APP_C.getApplicationName()));
// test with arbitrary domain-objects:
assertTrue(authorizationServices.hasPermission(PERSON_ID, OTHER_OU_ID, PERMISSION_IN_SUPER_OU_TEXT, APP_NAME));
}
/** System-wide check (no org-unit given): permission granted in a root org-unit counts. */
@Test
public void testCheckSystemWidePermissions()
throws EntityNotFoundException
{
// only test with arbitrary objects:
// has permmission:
assertTrue(authorizationServices.hasPermission(PERSON_IN_ROOT_ID, PERMISSION_IN_ROOT_OU_TEXT, APP_NAME));
// has not permission:
assertFalse(authorizationServices.hasPermission(PERSON_IN_ROOT_ID, OTHER_PERMISSION_TEXT, APP_NAME));
}
/** Technical users carry their permission sets directly, without org-unit membership. */
@Test
public void testHasPermissionTechUser()
throws EntityNotFoundException
{
// test with standard open-ur domain-objects:
assertTrue(authorizationServices.hasPermissionTechUser(TestObjectContainer.TECH_USER_UUID_1,
TestObjectContainer.PERMISSION_1_A.getPermissionText(), TestObjectContainer.APP_A.getApplicationName()));
assertTrue(authorizationServices.hasPermissionTechUser(TestObjectContainer.TECH_USER_UUID_1,
TestObjectContainer.PERMISSION_2_A.getPermissionText(), TestObjectContainer.APP_A.getApplicationName()));
assertFalse(authorizationServices.hasPermissionTechUser(TestObjectContainer.TECH_USER_UUID_1,
TestObjectContainer.PERMISSION_1_C.getPermissionText(), TestObjectContainer.APP_C.getApplicationName()));
// test with arbitrary domain-objects:
assertTrue(authorizationServices.hasPermissionTechUser(TECH_USER_ID, PERMISSION_TEXT, APP_NAME));
assertTrue(authorizationServices.hasPermissionTechUser(TECH_USER_ID, OTHER_PERMISSION_TEXT, APP_NAME));
assertFalse(authorizationServices.hasPermissionTechUser(TECH_USER_ID, PERMISSION_IN_SUPER_OU_TEXT, APP_NAME));
}
/** Unknown person id must raise EntityNotFoundException. */
@Test(expected=EntityNotFoundException.class)
public void testNoPersonFoundForId()
throws EntityNotFoundException
{
authorizationServices.hasPermission("someUnknownPersonId", SUPER_OU_ID, PERMISSION_TEXT, app.getApplicationName());
}
/** Unknown org-unit id must raise EntityNotFoundException. */
@Test(expected=EntityNotFoundException.class)
public void testNoOrgUnitFoundForId()
throws EntityNotFoundException
{
authorizationServices.hasPermission(PERSON_ID, "someUnknownOuId", PERMISSION_TEXT, app.getApplicationName());
}
/** Unknown permission text must raise EntityNotFoundException. */
@Test(expected=EntityNotFoundException.class)
public void testNoPermissionFoundWithGivenText()
throws EntityNotFoundException
{
authorizationServices.hasPermission(PERSON_ID, SUPER_OU_ID, "someUnknownPermissionText", app.getApplicationName());
}
/** Unknown application name must raise EntityNotFoundException. */
@Test(expected=EntityNotFoundException.class)
public void testNoApplicationFoundWithGivenName()
throws EntityNotFoundException
{
authorizationServices.hasPermission(PERSON_ID, OU_ID, PERMISSION_TEXT, "someUnknownApplicationName");
}
/** Unknown tech-user id must raise EntityNotFoundException. */
@Test(expected=EntityNotFoundException.class)
public void testNoTechUserFoundForId()
throws EntityNotFoundException
{
authorizationServices.hasPermissionTechUser("someUnknownTechUserId", PERMISSION_TEXT, app.getApplicationName());
}
/** A person id passed to the tech-user check must not resolve: expect EntityNotFoundException. */
@Test(expected=EntityNotFoundException.class)
public void testCheckPermissionTechUserButGivePersonId()
throws EntityNotFoundException
{
// check tech-user-permission with valid permission-text but with person-id:
authorizationServices.hasPermissionTechUser(PERSON_ID, PERMISSION_TEXT, app.getApplicationName());
}
}
| |
package it.angelic.soulissclient.model.db;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.database.SQLException;
import android.database.sqlite.SQLiteDatabase;
import android.util.Log;
import android.util.SparseArray;
import java.io.File;
import java.sql.SQLDataException;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.NoSuchElementException;
import im.dacer.androidcharts.ClockPieHelper;
import it.angelic.soulissclient.BuildConfig;
import it.angelic.soulissclient.Constants;
import it.angelic.soulissclient.R;
import it.angelic.soulissclient.SoulissApp;
import it.angelic.soulissclient.fragments.TimeRangeEnum;
import it.angelic.soulissclient.helpers.SoulissPreferenceHelper;
import it.angelic.soulissclient.model.ISoulissTypical;
import it.angelic.soulissclient.model.SoulissCommand;
import it.angelic.soulissclient.model.SoulissNode;
import it.angelic.soulissclient.model.SoulissScene;
import it.angelic.soulissclient.model.SoulissTag;
import it.angelic.soulissclient.model.SoulissTrigger;
import it.angelic.soulissclient.model.SoulissTypical;
import it.angelic.soulissclient.model.SoulissTypicalFactory;
import it.angelic.soulissclient.model.typicals.SoulissTypical41AntiTheft;
import it.angelic.soulissclient.model.typicals.SoulissTypical42AntiTheftPeer;
import it.angelic.soulissclient.model.typicals.SoulissTypical43AntiTheftLocalPeer;
import it.angelic.soulissclient.util.FontAwesomeUtil;
import it.angelic.soulissclient.util.LauncherElementEnum;
import static it.angelic.soulissclient.Constants.MASSIVE_NODE_ID;
import static it.angelic.soulissclient.Constants.TAG;
import static junit.framework.Assert.assertEquals;
import static junit.framework.Assert.assertNotNull;
/**
* Classe helper per l'esecuzione di interrogazioni al DB, Inserimenti eccetera
*
* @author Ale
*/
public class SoulissDBHelper {
// Database fields
// NOTE(review): both handles are static, so every SoulissDBHelper instance shares
// one SQLiteDatabase and one open-helper, while the open-helper is re-created by
// each constructor call — confirm the last-constructed Context is always valid.
protected static SQLiteDatabase database;
protected static SoulissDBOpenHelper soulissDatabase;
protected final Context context;
protected SoulissPreferenceHelper opts;
/**
 * Builds a helper bound to the given context; (re)creates the shared static
 * open-helper and pulls the preference holder from the application singleton.
 *
 * @param context Android context used to open the database
 */
public SoulissDBHelper(Context context) {
    this.context = context;
    this.opts = SoulissApp.getOpzioni();
    soulissDatabase = new SoulissDBOpenHelper(context);
}
/**
 * Returns the shared, statically-held database handle.
 * May be {@code null} (never opened) or closed; callers usually invoke
 * {@link #open()} first.
 */
public static synchronized SQLiteDatabase getDatabase() {
return database;
}
/**
 * Lazily opens (or re-opens) the shared writable database.
 * No-op when the handle already exists and is open.
 */
public static synchronized void open() throws SQLException {
    if (database != null && database.isOpen()) {
        return; // already usable
    }
    database = soulissDatabase.getWritableDatabase();
}
/**
 * Runs VACUUM on the database to reclaim space; logs a warning and does
 * nothing when the database is not open.
 */
public void clean() {
    if (database == null || !database.isOpen()) {
        Log.w(TAG, "DB closed, clean() failed");
        return;
    }
    database.execSQL("VACUUM");
}
/**
 * Closes the open-helper and, when still open, the shared database handle;
 * logs a warning when the database was already closed.
 */
public void close() {
    soulissDatabase.close();
    if (database == null || !database.isOpen()) {
        Log.w(TAG, "DB already closed");
        return;
    }
    database.close();
}
/**
 * Counts the typicals associated with the favourites tag.
 *
 * @return number of rows in the tag/typical table linked to the favourites tag id
 */
public int countFavourites() {
    if (!database.isOpen()) {
        open();
    }
    String query = "select count(1) from " + SoulissDBOpenHelper.TABLE_TAGS_TYPICALS + " where "
            + SoulissDBOpenHelper.COLUMN_TAG_TYP_TAG_ID + " = " + SoulissDBOpenHelper.FAVOURITES_TAG_ID;
    Cursor c = database.rawQuery(query, null);
    c.moveToFirst();
    int result = c.getInt(0);
    c.close();
    return result;
}
/**
 * Counts the Souliss nodes stored in the DB.
 * Now lazily opens the database first, consistently with every other
 * count* method in this class (the original queried a possibly-closed handle).
 *
 * @return number of rows in the nodes table
 */
public int countNodes() {
    if (!database.isOpen())
        open();
    Cursor mCount = database.rawQuery("select count(1) from " + SoulissDBOpenHelper.TABLE_NODES, null);
    mCount.moveToFirst();
    int count = mCount.getInt(0);
    mCount.close();
    return count;
}
/**
 * Counts the scheduled/positional programs: commands with no execution time
 * or of comeback/goaway/triggered type, that do not belong to a scene.
 *
 * @return number of matching command rows
 */
public int countPrograms() {
    if (!database.isOpen()) {
        open();
    }
    String query = "select count(*) from " + SoulissDBOpenHelper.TABLE_COMMANDS + " WHERE ("
            + SoulissDBOpenHelper.COLUMN_COMMAND_EXECTIME + " is null OR " + SoulissDBOpenHelper.COLUMN_COMMAND_TYPE + " ="
            + Constants.COMMAND_COMEBACK_CODE + " OR " + SoulissDBOpenHelper.COLUMN_COMMAND_TYPE + " ="
            + Constants.COMMAND_GOAWAY_CODE + " OR " + SoulissDBOpenHelper.COLUMN_COMMAND_TYPE + " ="
            + Constants.COMMAND_TRIGGERED + ") AND " + SoulissDBOpenHelper.COLUMN_COMMAND_SCENEID + " IS NULL";
    Cursor c = database.rawQuery(query, null);
    c.moveToFirst();
    int result = c.getInt(0);
    c.close();
    return result;
}
/**
 * Counts the scenes stored in the DB.
 *
 * @return number of rows in the scenes table
 */
public int countScenes() {
    if (!database.isOpen()) {
        open();
    }
    Cursor c = database.rawQuery("select count(*) from " + SoulissDBOpenHelper.TABLE_SCENES, null);
    c.moveToFirst();
    int result = c.getInt(0);
    c.close();
    return result;
}
/**
 * Counts the tags stored in the DB.
 *
 * @return number of rows in the tags table
 */
public int countTags() {
    if (!database.isOpen()) {
        open();
    }
    Cursor c = database.rawQuery("select count(1) from " + SoulissDBOpenHelper.TABLE_TAGS, null);
    c.moveToFirst();
    int result = c.getInt(0);
    c.close();
    return result;
}
/**
 * Counts the triggers stored in the DB.
 *
 * @return number of rows in the triggers table
 */
public int countTriggers() {
    if (!database.isOpen()) {
        open();
    }
    Cursor c = database.rawQuery("select count(*) from " + SoulissDBOpenHelper.TABLE_TRIGGERS, null);
    c.moveToFirst();
    int result = c.getInt(0);
    c.close();
    return result;
}
/**
 * Counts the tag/typical association rows.
 *
 * @return number of rows in the tag/typical join table
 */
public int countTypicalTags() {
    if (!database.isOpen()) {
        open();
    }
    Cursor c = database.rawQuery("select count(1) from " + SoulissDBOpenHelper.TABLE_TAGS_TYPICALS, null);
    c.moveToFirst();
    int result = c.getInt(0);
    c.close();
    return result;
}
/**
 * Counts the non-empty typicals, i.e. those different from
 * Souliss_T_related and Souliss_T_empty.
 * Now lazily opens the database first, consistently with the other
 * count* methods (the original queried a possibly-closed handle).
 *
 * @return number of matching rows in the typicals table
 */
public int countTypicals() {
    if (!database.isOpen())
        open();
    Cursor mCount = database.rawQuery("select count(1) from " + SoulissDBOpenHelper.TABLE_TYPICALS + " where "
            + SoulissDBOpenHelper.COLUMN_TYPICAL + " <> "
            + Constants.Typicals.Souliss_T_related
            + " AND " + SoulissDBOpenHelper.COLUMN_TYPICAL + " <> "
            + Constants.Typicals.Souliss_T_empty, null);
    mCount.moveToFirst();
    int count = mCount.getInt(0);
    mCount.close();
    return count;
}
/**
 * Persists a node with an update-then-insert upsert: updates the row matching
 * the node id, inserting a new one when no row matched. Single columns are
 * written explicitly to avoid side effects on fields not covered here.
 *
 * @param nodeIN node to persist
 * @return number of updated rows (0 means a fresh row was inserted)
 */
public int createOrUpdateNode(SoulissNode nodeIN) {
    ContentValues values = new ContentValues();
    // wrap values from object
    values.put(SoulissDBOpenHelper.COLUMN_NODE_NAME, nodeIN.getName());
    values.put(SoulissDBOpenHelper.COLUMN_NODE_LASTMOD, Calendar.getInstance().getTime().getTime());
    values.put(SoulissDBOpenHelper.COLUMN_NODE_ID, nodeIN.getNodeId());
    values.put(SoulissDBOpenHelper.COLUMN_NODE_HEALTH, nodeIN.getHealth());
    values.put(SoulissDBOpenHelper.COLUMN_NODE_ICON, nodeIN.getIconResourceId());
    int upd = database.update(SoulissDBOpenHelper.TABLE_NODES, values,
            SoulissDBOpenHelper.COLUMN_NODE_ID + " = " + nodeIN.getNodeId(), null);
    if (upd == 0) {
        // no existing row: insert one (the unused local holding the row id was dropped)
        database.insert(SoulissDBOpenHelper.TABLE_NODES, null, values);
    }
    return upd;
}
/**
 * Updates an existing scene, or creates a new empty one when {@code nodeIN}
 * is {@code null}.
 *
 * @param nodeIN scene to update, or {@code null} to insert a fresh scene
 * @return number of updated rows when {@code nodeIN} is given; otherwise the
 *         row id of the newly inserted scene — NOTE(review): the two branches
 *         return different kinds of values; confirm callers expect this.
 */
public int createOrUpdateScene(SoulissScene nodeIN) {
ContentValues values = new ContentValues();
if (nodeIN != null) {
// wrap values from object
values.put(SoulissDBOpenHelper.COLUMN_SCENE_ID, nodeIN.getId());
values.put(SoulissDBOpenHelper.COLUMN_SCENE_ICON, nodeIN.getIconResourceId());
values.put(SoulissDBOpenHelper.COLUMN_SCENE_NAME, nodeIN.toString());
return database.update(SoulissDBOpenHelper.TABLE_SCENES, values, SoulissDBOpenHelper.COLUMN_SCENE_ID + " = " + nodeIN.getId(),
null);
} else {
values.put(SoulissDBOpenHelper.COLUMN_SCENE_ICON, FontAwesomeUtil.getCodeIndexByFontName(context, "fa-moon-o"));
// insert first, then derive the display name from the generated row id
int ret = (int) database.insert(SoulissDBOpenHelper.TABLE_SCENES, null, values);
values.put(SoulissDBOpenHelper.COLUMN_SCENE_NAME,
context.getResources().getString(R.string.scene) + " " + ret);
database.update(SoulissDBOpenHelper.TABLE_SCENES, values, SoulissDBOpenHelper.COLUMN_SCENE_ID + " = " + ret, null);
return ret;
}
}
/*
* public void deleteTypical(int nodeid, SoulissTypical comment) { // long
* id = comment.getNodeId(); // System.out.println("Comment deleted with id: " +
* id); database.delete(SoulissDB.TABLE_TYPICALS, SoulissDB.COLUMN_ID +
* " = " + nodeid, null); }
*/
/**
 * Deletes a single command row by its command id.
 *
 * @param toRename command to delete (parameter name kept for source compatibility)
 * @return number of rows deleted
 */
public int deleteCommand(SoulissCommand toRename) {
    String whereClause = SoulissDBOpenHelper.COLUMN_COMMAND_ID + " = " + toRename.getCommandId();
    return database.delete(SoulissDBOpenHelper.TABLE_COMMANDS, whereClause, null);
}
/**
 * Deletes a scene, cascading over its commands and any launcher shortcut
 * of type SCENE pointing at it.
 *
 * @param toBeDeleted scene to remove
 * @return number of rows deleted from the scenes table itself
 */
public int deleteScene(SoulissScene toBeDeleted) {
    // cascade: commands belonging to the scene
    database.delete(SoulissDBOpenHelper.TABLE_COMMANDS,
            SoulissDBOpenHelper.COLUMN_COMMAND_SCENEID + " = " + toBeDeleted.getId(), null);
    // cascade: launcher elements referencing this scene
    String launcherWhere = SoulissDBOpenHelper.COLUMN_LAUNCHER_TYPE + " = " + LauncherElementEnum.SCENE.ordinal()
            + " AND " + SoulissDBOpenHelper.COLUMN_LAUNCHER_SCENE_ID + " = " + toBeDeleted.getId();
    database.delete(SoulissDBOpenHelper.TABLE_LAUNCHER, launcherWhere, null);
    // finally the scene row itself
    return database.delete(SoulissDBOpenHelper.TABLE_SCENES,
            SoulissDBOpenHelper.COLUMN_SCENE_ID + " = " + toBeDeleted.getId(), null);
}
/**
 * Deletes a tag, cascading over its typical associations, its child tags and
 * any launcher shortcut of type TAG referencing it.
 *
 * @param toBeDeleted tag to remove
 * @return number of rows deleted from the tags table for the tag itself
 */
public int deleteTag(SoulissTag toBeDeleted) {
    // cascade: tag/typical associations
    database.delete(SoulissDBOpenHelper.TABLE_TAGS_TYPICALS,
            SoulissDBOpenHelper.COLUMN_TAG_TYP_TAG_ID + " = " + toBeDeleted.getTagId(), null);
    // cascade: child tags
    database.delete(SoulissDBOpenHelper.TABLE_TAGS,
            SoulissDBOpenHelper.COLUMN_TAG_FATHER_ID + " = " + toBeDeleted.getTagId(), null);
    // cascade: launcher elements referencing this tag
    String launcherWhere = SoulissDBOpenHelper.COLUMN_LAUNCHER_TYPE + " = " + LauncherElementEnum.TAG.ordinal()
            + " AND " + SoulissDBOpenHelper.COLUMN_LAUNCHER_TAG_ID + " = " + toBeDeleted.getTagId();
    database.delete(SoulissDBOpenHelper.TABLE_LAUNCHER, launcherWhere, null);
    // finally the tag row itself
    return database.delete(SoulissDBOpenHelper.TABLE_TAGS,
            SoulissDBOpenHelper.COLUMN_TAG_ID + " = " + toBeDeleted.getTagId(), null);
}
/**
 * Removes a single tag/typical association identified by (tag, node, slot).
 *
 * @param tagId  tag side of the association
 * @param nodeid node owning the typical
 * @param slot   slot of the typical on that node
 * @return number of association rows deleted
 */
public int deleteTagTypical(long tagId, int nodeid, int slot) {
    String whereClause = SoulissDBOpenHelper.COLUMN_TAG_TYP_TAG_ID + " = " + tagId
            + " AND " + SoulissDBOpenHelper.COLUMN_TAG_TYP_NODE_ID + " = " + nodeid
            + " AND " + SoulissDBOpenHelper.COLUMN_TAG_TYP_SLOT + " = " + slot;
    return database.delete(SoulissDBOpenHelper.TABLE_TAGS_TYPICALS, whereClause, null);
}
/**
 * Loads every node from the DB, ordered by node id, each populated with its
 * typicals.
 *
 * @return list of fully populated nodes
 */
public List<SoulissNode> getAllNodes() {
    List<SoulissNode> result = new ArrayList<>();
    Cursor cursor = database.query(SoulissDBOpenHelper.TABLE_NODES, SoulissDBOpenHelper.ALLCOLUMNS_NODES, null, null, null, null,
            SoulissDBOpenHelper.COLUMN_NODE_ID);
    for (cursor.moveToFirst(); !cursor.isAfterLast(); cursor.moveToNext()) {
        SoulissNode node = SoulissNode.cursorToNode(context, cursor);
        // attach every typical belonging to this node
        for (SoulissTypical typical : getNodeTypicals(node)) {
            node.addTypical(typical);
        }
        result.add(node);
    }
    cursor.close();
    return result;
}
/**
 * Loads every trigger, joining the triggers table with the commands table on
 * the trigger's command id, and resolves each trigger's target typical.
 *
 * @return all triggers with their command and trigger DTOs attached
 */
public List<SoulissTrigger> getAllTriggers() {
List<SoulissTrigger> ret = new ArrayList<>();
// Cursor cursor = database.query(SoulissDB.TABLE_TRIGGERS,
// SoulissDB.ALLCOLUMNS_TRIGGERS, null, null, null, null,null);
// String MY_QUERY =
// "SELECT * FROM "+SoulissDB.TABLE_TRIGGERS+" a INNER JOIN "+SoulissDB.TABLE_COMMANDS
// +" b ON a.cmdid = b.cmdid WHERE b.property_id=?";
String MY_QUERY = "SELECT * FROM " + SoulissDBOpenHelper.TABLE_TRIGGERS + " a " + "INNER JOIN "
+ SoulissDBOpenHelper.TABLE_COMMANDS + " b ON a." + SoulissDBOpenHelper.COLUMN_TRIGGER_COMMAND_ID + " = b."
+ SoulissDBOpenHelper.COLUMN_COMMAND_ID;
Cursor cursor = database.rawQuery(MY_QUERY, null);
cursor.moveToFirst();
while (!cursor.isAfterLast()) {
// each joined row yields a command DTO plus the trigger DTO wrapping it
SoulissCommandDTO cmd = new SoulissCommandDTO(cursor);
SoulissTypical tgt = getTypical(cmd.getNodeId(), cmd.getSlot());
SoulissTrigger cols = new SoulissTrigger(cmd, tgt);
SoulissTriggerDTO comment = new SoulissTriggerDTO(cursor);
cols.setTriggerDTO(comment);
ret.add(cols);
cursor.moveToNext();
}
// Make sure to close the cursor
cursor.close();
return ret;
}
/**
 * DB typical factory: loads every typical in the DB ordered by node id and
 * slot, resolves its parent node and flags favourite/tagged state from the
 * tag association table.
 *
 * @return produced Typicals
 */
public List<SoulissTypical> getAllTypicals() {
List<SoulissTypical> comments = new ArrayList<>();
Cursor cursor = database.query(SoulissDBOpenHelper.TABLE_TYPICALS, SoulissDBOpenHelper.ALLCOLUMNS_TYPICALS, null, null, null, null, SoulissDBOpenHelper.COLUMN_TYPICAL_NODE_ID + " , " + SoulissDBOpenHelper.COLUMN_TYPICAL_SLOT);
cursor.moveToFirst();
// NOTE(review): node id is used below as an index into this list — assumes
// node ids are contiguous starting at 0; confirm against the nodes table.
List<SoulissNode> nodi = getAllNodes();
while (!cursor.isAfterLast()) {
SoulissTypicalDTO dto = new SoulissTypicalDTO(cursor);
SoulissTypical newTyp = SoulissTypicalFactory.getTypical(context, dto.getTypical(), nodi.get(dto.getNodeId()), dto, opts);
// tags: no join here because the tag/typical relation is 1-to-n
Cursor typTags = database.query(SoulissDBOpenHelper.TABLE_TAGS_TYPICALS, SoulissDBOpenHelper.ALLCOLUMNS_TAGS_TYPICAL,
SoulissDBOpenHelper.COLUMN_TAG_TYP_NODE_ID + " = " + dto.getNodeId()
+ " AND " + SoulissDBOpenHelper.COLUMN_TAG_TYP_SLOT + " = " + dto.getSlot(),
null, null, null, null);
typTags.moveToFirst();
while (!typTags.isAfterLast()) {
// the favourites tag sets a dedicated flag; any other tag marks it "tagged"
int tagId = typTags.getInt(typTags.getColumnIndex(SoulissDBOpenHelper.COLUMN_TAG_TYP_TAG_ID));
if (tagId == SoulissDBOpenHelper.FAVOURITES_TAG_ID)
dto.setFavourite(true);
else
dto.setTagged(true);
typTags.moveToNext();
}
typTags.close();
//hack dto ID, could be different if parent is massive
newTyp.getTypicalDTO().setNodeId(dto.getNodeId());
newTyp.setParentNode(nodi.get(dto.getNodeId()));
// if (newTyp.getTypical() !=
// Constants.Souliss_T_CurrentSensor_slave)
comments.add(newTyp);
cursor.moveToNext();
}
// Make sure to close the cursor
cursor.close();
return comments;
}
/**
 * Returns the antitheft master typical (Souliss T41).
 * The cursor is now closed on every path — the original leaked it when no
 * master row was present and the NoSuchElementException path was taken.
 *
 * @return produced Typical
 * @throws NoSuchElementException when no T41 master row exists in the DB
 */
public SoulissTypical41AntiTheft getAntiTheftMasterTypical() {
    // query with primary key
    Cursor cursor = database.query(SoulissDBOpenHelper.TABLE_TYPICALS, SoulissDBOpenHelper.ALLCOLUMNS_TYPICALS,
            SoulissDBOpenHelper.COLUMN_TYPICAL + " = "
                    + Constants.Typicals.Souliss_T41_Antitheft_Main, null, null,
            null, null);
    try {
        if (cursor.moveToFirst()) {
            SoulissTypicalDTO dto = new SoulissTypicalDTO(cursor);
            return (SoulissTypical41AntiTheft) SoulissTypicalFactory.getTypical(context, dto.getTypical(),
                    getSoulissNode(dto.getNodeId()), dto, opts);
        }
        throw new NoSuchElementException();
    } finally {
        cursor.close();
    }
}
/**
 * Returns all antitheft sensor typicals: first every T42 peer, then every
 * T43 local peer, each with its parent node resolved.
 *
 * @return list of peer typicals (T42 rows followed by T43 rows)
 */
public List<SoulissTypical> getAntiTheftSensors() {
List<SoulissTypical> comments = new ArrayList<>();
// first pass: T42 antitheft peers
Cursor cursor = database.query(SoulissDBOpenHelper.TABLE_TYPICALS, SoulissDBOpenHelper.ALLCOLUMNS_TYPICALS,
SoulissDBOpenHelper.COLUMN_TYPICAL + " = "
+ Constants.Typicals.Souliss_T42_Antitheft_Peer, null, null,
null, null);
cursor.moveToFirst();
while (!cursor.isAfterLast()) {
SoulissTypicalDTO dto = new SoulissTypicalDTO(cursor);
SoulissNode parent = getSoulissNode(dto.getNodeId());
SoulissTypical42AntiTheftPeer newTyp = (SoulissTypical42AntiTheftPeer) SoulissTypicalFactory.getTypical(context,
dto.getTypical(), parent, dto, opts);
newTyp.setParentNode(parent);
// if (newTyp.getTypical() !=
// Constants.Souliss_T_CurrentSensor_slave)
comments.add(newTyp);
cursor.moveToNext();
}
// Make sure to close the cursor
cursor.close();
// second pass: T43 local peers, appended after the T42 results
cursor = database.query(SoulissDBOpenHelper.TABLE_TYPICALS, SoulissDBOpenHelper.ALLCOLUMNS_TYPICALS, SoulissDBOpenHelper.COLUMN_TYPICAL
+ " = " + Constants.Typicals.Souliss_T43_Antitheft_LocalPeer, null,
null, null, null);
cursor.moveToFirst();
while (!cursor.isAfterLast()) {
SoulissTypicalDTO dto = new SoulissTypicalDTO(cursor);
SoulissNode parent = getSoulissNode(dto.getNodeId());
SoulissTypical43AntiTheftLocalPeer newTyp = (SoulissTypical43AntiTheftLocalPeer) SoulissTypicalFactory.getTypical(context, dto.getTypical(), parent, dto, opts);
newTyp.setParentNode(parent);
// if (newTyp.getTypical() !=
// Constants.Souliss_T_CurrentSensor_slave)
comments.add(newTyp);
cursor.moveToNext();
}
// Make sure to close the cursor
cursor.close();
return comments;
}
/**
 * Groups log rows of one typical's slot by a strftime bucket, aggregating
 * AVG/MIN/MAX of the logged value per bucket.
 *
 * @param tgt     typical whose logs are queried
 * @param groupBy SQLite strftime format: "%m" = month (12 buckets),
 *                "%w" = weekday (7 buckets), anything else = hour (24 buckets)
 * @param range   log window: 0 = all data, 1 = last month, 2 = last week
 * @return bucket index -&gt; aggregated graph data; every bucket is pre-filled
 *         with an empty entry so missing indices still exist
 */
public SparseArray<SoulissGraphData> getGroupedTypicalLogs(ISoulissTypical tgt, String groupBy, int range) {
SparseArray<SoulissGraphData> comments = new SparseArray<>();
String limitCause = "";
Calendar now = Calendar.getInstance();
// translate the range code into a lower bound on the log timestamp
switch (range) {
case 0:
// all data: no time filter
break;
case 2:
now.add(Calendar.DATE, -7);
limitCause = " AND " + SoulissDBOpenHelper.COLUMN_LOG_DATE + " > " + now.getTime().getTime();
break;
case 1:
now.add(Calendar.MONTH, -1);
limitCause = " AND " + SoulissDBOpenHelper.COLUMN_LOG_DATE + " > " + now.getTime().getTime();
break;
default:
Log.e("DB", "Unexpected switch ERROR");
break;
}
// bucket count depends on the strftime format used for grouping
int tot;
if (groupBy.compareTo("%m") == 0)
tot = 12;
else if (groupBy.compareTo("%w") == 0)
tot = 7;
else
tot = 24;
// pre-fill every bucket so the result has no missing indices
for (int i = 0; i < tot; i++) {
comments.put(i, new SoulissGraphData());
}
Log.d(Constants.TAG, "QUERY GROUPED:");
Cursor cursor = database.query(SoulissDBOpenHelper.TABLE_LOGS, new String[]{
"strftime('" + groupBy +
"', datetime((cldlogwhen/1000), 'unixepoch', 'localtime')) AS IDX",
"AVG(CAST(flologval AS FLOAT)) AS AVG",
"MIN(CAST(flologval AS FLOAT)) AS MIN",
"MAX(CAST(flologval AS FLOAT)) AS MAX"},
SoulissDBOpenHelper.COLUMN_LOG_NODE_ID
+ " = "// selection
+ tgt.getNodeId() + " AND " +
SoulissDBOpenHelper.COLUMN_LOG_SLOT + " = "
+ tgt.getSlot() + limitCause + " ",
null,// String[] selectionArgs
"strftime('" + groupBy// GROUP BY
+ "', datetime((cldlogwhen/1000), 'unixepoch', 'localtime'))",
null, // HAVING
"IDX ASC");// ORDER BY
cursor.moveToFirst();
while (!cursor.isAfterLast()) {
SoulissGraphData dto = new SoulissGraphData(cursor);
//assertEquals(true, dto.key >= 0);
comments.put(Integer.parseInt(dto.key), dto);
cursor.moveToNext();
}
// Make sure to close the cursor
cursor.close();
return comments;
}
/**
 * Returns the raw history of one typical's slot, ordered by log date
 * ascending.
 *
 * @param tgt   typical whose slot history is read
 * @param range time window to select logs from
 * @return insertion-ordered map: log timestamp -&gt; logged value
 */
public LinkedHashMap<Date, Short> getHistoryTypicalHashMap(SoulissTypical tgt, TimeRangeEnum range) {
LinkedHashMap<Date, Short> comments = new LinkedHashMap<>();
Date dff;
Short how;
String limitCause = "";
Calendar now = Calendar.getInstance();
// translate the range into a lower bound on the log timestamp
switch (range) {
case ALL_DATA:
// all data: no time filter
break;
case LAST_WEEK:
now.add(Calendar.DATE, -7);
limitCause = " AND " + SoulissDBOpenHelper.COLUMN_LOG_DATE + " > " + now.getTime().getTime();
break;
case LAST_MONTH:
now.add(Calendar.MONTH, -1);
limitCause = " AND " + SoulissDBOpenHelper.COLUMN_LOG_DATE + " > " + now.getTime().getTime();
break;
case LAST_DAY:
// NOTE(review): DAY_OF_WEEK_IN_MONTH subtracts 7 days, not 1 — confirm intended
now.add(Calendar.DAY_OF_WEEK_IN_MONTH, -1);
limitCause = " AND " + SoulissDBOpenHelper.COLUMN_LOG_DATE + " > " + now.getTime().getTime();
break;
default:
Log.e("DB", "Unexpected switch ERROR");
break;
}
Cursor cursor = database.query(SoulissDBOpenHelper.TABLE_LOGS, new String[]{
SoulissDBOpenHelper.COLUMN_LOG_DATE,
SoulissDBOpenHelper.COLUMN_LOG_VAL,
//"strftime('%Y-%m-%d', datetime((cldlogwhen/1000), 'unixepoch', 'localtime')) AS IDX",
//"AVG(CAST(flologval AS FLOAT)) AS AVG", "MIN(CAST(flologval AS FLOAT)) AS MIN",
//"MAX(CAST(flologval AS FLOAT)) AS MAX"
}
, SoulissDBOpenHelper.COLUMN_LOG_NODE_ID + " = "
+ tgt.getNodeId() + " AND " + SoulissDBOpenHelper.COLUMN_LOG_SLOT + " = "
+ tgt.getSlot() + limitCause + " ", null, null, null, SoulissDBOpenHelper.COLUMN_LOG_DATE + " ASC");
cursor.moveToFirst();
while (!cursor.isAfterLast()) {
try {
dff = new Date(cursor.getLong(cursor.getColumnIndex(SoulissDBOpenHelper.COLUMN_LOG_DATE)));
how = (short) cursor.getInt(cursor.getColumnIndex(SoulissDBOpenHelper.COLUMN_LOG_VAL));
//SoulissTypicalDTO dto = new SoulissTypicalDTO(cursor);
//comments.put(dto.key, dto);
comments.put(dff, how);
} catch (Exception e) {
// skip unreadable rows but keep iterating
Log.e(TAG, "getHistoryTypicalHashMap", e);
}
cursor.moveToNext();
}
cursor.close();
return comments;
}
/**
 * Returns the history of one typical's slot grouped by day, with AVG/MIN/MAX
 * of the logged value per day.
 *
 * @param tgt   typical whose slot history is read
 * @param range log window: 0 = all data, 1 = last month, 2 = last week
 * @return day -&gt; aggregated graph data for that day
 */
public HashMap<Date, SoulissHistoryGraphData> getHistoryTypicalLogs(ISoulissTypical tgt, int range) {
HashMap<Date, SoulissHistoryGraphData> comments = new HashMap<>();
Date dff;
String limitCause = "";
Calendar now = Calendar.getInstance();
// translate the range code into a lower bound on the log timestamp
switch (range) {
case 0:
// all data: no time filter
break;
case 2:
now.add(Calendar.DATE, -7);
limitCause = " AND " + SoulissDBOpenHelper.COLUMN_LOG_DATE + " > " + now.getTime().getTime();
break;
case 1:
now.add(Calendar.MONTH, -1);
limitCause = " AND " + SoulissDBOpenHelper.COLUMN_LOG_DATE + " > " + now.getTime().getTime();
break;
default:
Log.e("DB", "Unexpected switch ERROR");
break;
}
// group by the calendar day extracted from the millisecond timestamp
Cursor cursor = database.query(SoulissDBOpenHelper.TABLE_LOGS, new String[]{
"strftime('%Y-%m-%d', datetime((cldlogwhen/1000), 'unixepoch', 'localtime')) AS IDX",
"AVG(CAST(flologval AS FLOAT)) AS AVG", "MIN(CAST(flologval AS FLOAT)) AS MIN",
"MAX(CAST(flologval AS FLOAT)) AS MAX"}, SoulissDBOpenHelper.COLUMN_LOG_NODE_ID + " = "
+ tgt.getNodeId() + " AND " + SoulissDBOpenHelper.COLUMN_LOG_SLOT + " = "
+ tgt.getSlot() + limitCause + " ", null,
"strftime('%Y-%m-%d', datetime((cldlogwhen/1000), 'unixepoch', 'localtime'))", null, "IDX ASC");
cursor.moveToFirst();
while (!cursor.isAfterLast()) {
try {
dff = Constants.yearFormat.parse(cursor.getString(0));
SoulissHistoryGraphData dto = new SoulissHistoryGraphData(cursor, dff);
comments.put(dto.key, dto);
} catch (ParseException e) {
// skip rows whose day string cannot be parsed, keep iterating
Log.e(TAG, "getHistoryTypicalLogs", e);
}
cursor.moveToNext();
}
// Make sure to close the cursor
cursor.close();
return comments;
}
/**
 * Loads every typical (slot) persisted for the given node, including its
 * favourite/tagged flags read from the tag-typical association table.
 *
 * @param parent node whose typicals are loaded; also set as parent on each result
 * @return typicals built via SoulissTypicalFactory, in query order
 */
public List<SoulissTypical> getNodeTypicals(SoulissNode parent) {
    List<SoulissTypical> comments = new ArrayList<>();
    Cursor cursor = database.query(SoulissDBOpenHelper.TABLE_TYPICALS, SoulissDBOpenHelper.ALLCOLUMNS_TYPICALS,
            SoulissDBOpenHelper.COLUMN_TYPICAL_NODE_ID + " = " + parent.getNodeId(), null, null, null, null);
    cursor.moveToFirst();
    while (!cursor.isAfterLast()) {
        SoulissTypicalDTO dto = new SoulissTypicalDTO(cursor);
        SoulissTypical newTyp = SoulissTypicalFactory.getTypical(context, dto.getTypical(), parent, dto, opts);
        // Tags are a 1-to-n relation, so they are read with a separate query, not a join
        Cursor typTags = database.query(SoulissDBOpenHelper.TABLE_TAGS_TYPICALS, SoulissDBOpenHelper.ALLCOLUMNS_TAGS_TYPICAL,
                SoulissDBOpenHelper.COLUMN_TAG_TYP_NODE_ID + " = " + dto.getNodeId()
                        + " AND " + SoulissDBOpenHelper.COLUMN_TAG_TYP_SLOT + " = " + dto.getSlot(),
                null, null, null, null);
        typTags.moveToFirst();
        while (!typTags.isAfterLast()) {
            int tagId = typTags.getInt(typTags.getColumnIndex(SoulissDBOpenHelper.COLUMN_TAG_TYP_TAG_ID));
            // the favourites pseudo-tag marks a favourite; any other tag marks "tagged"
            if (tagId == SoulissDBOpenHelper.FAVOURITES_TAG_ID)
                dto.setFavourite(true);
            else
                dto.setTagged(true);
            typTags.moveToNext();
        }
        typTags.close();
        // hack dto ID, could be different if parent is massive
        newTyp.getTypicalDTO().setNodeId(parent.getNodeId());
        newTyp.setParentNode(parent);
        comments.add(newTyp);
        cursor.moveToNext();
    }
    // Make sure to close the cursor
    cursor.close();
    return comments;
}
/**
 * Loads all positional (geofence) programs: COMEBACK and GOAWAY commands.
 * Each returned command has its scene id cleared.
 *
 * @return list of positional commands, possibly empty
 */
public LinkedList<SoulissCommand> getPositionalPrograms() {
    LinkedList<SoulissCommand> ret = new LinkedList<>();
    Cursor cursor = database
            .query(SoulissDBOpenHelper.TABLE_COMMANDS, SoulissDBOpenHelper.ALLCOLUMNS_COMMANDS, SoulissDBOpenHelper.COLUMN_COMMAND_TYPE + " = "
                    + Constants.COMMAND_COMEBACK_CODE + " OR " + SoulissDBOpenHelper.COLUMN_COMMAND_TYPE + " = "
                    + Constants.COMMAND_GOAWAY_CODE, null, null, null, null);
    cursor.moveToFirst();
    while (!cursor.isAfterLast()) {
        SoulissCommandDTO comment = new SoulissCommandDTO(cursor);
        // the DTO captures the current row, so advancing the cursor here is safe
        cursor.moveToNext();
        short node = comment.getNodeId();
        short slot = comment.getSlot();
        SoulissCommand adding;
        // NOTE(review): unqualified MASSIVE_NODE_ID, while sibling methods use
        // Constants.MASSIVE_NODE_ID — presumably the same constant; confirm.
        if (node > MASSIVE_NODE_ID) {
            // command targets a concrete node/slot: resolve its typical
            SoulissTypical parentTypical = getTypical(node, slot);
            adding = new SoulissCommand(context, comment, parentTypical);
        } else {
            adding = new SoulissCommand(context, comment);
        }
        adding.setSceneId(null);
        ret.add(adding);
    }
    cursor.close();
    return ret;
}
/**
 * Loads all root tags (tags with no father), each fully populated with its
 * assigned typicals and (recursively) its child tags.
 *
 * @return root tags ordered by tag order, then tag id
 */
public List<SoulissTag> getRootTags() {
    List<SoulissTag> comments = new ArrayList<>();
    if (!database.isOpen())
        open();
    // roots only: father id IS NULL
    Cursor cursor = database.query(SoulissDBOpenHelper.TABLE_TAGS, SoulissDBOpenHelper.ALLCOLUMNS_TAGS,
            SoulissDBOpenHelper.COLUMN_TAG_FATHER_ID + " IS NULL ", null, null, null, SoulissDBOpenHelper.COLUMN_TAG_ORDER + ", " + SoulissDBOpenHelper.COLUMN_TAG_ID);
    cursor.moveToFirst();
    while (!cursor.isAfterLast()) {
        SoulissTag dto = new SoulissTag();
        dto.setTagId(cursor.getInt(cursor.getColumnIndex(SoulissDBOpenHelper.COLUMN_TAG_ID)));
        dto.setName(cursor.getString(cursor.getColumnIndex(SoulissDBOpenHelper.COLUMN_TAG_NAME)));
        dto.setTagOrder(cursor.getInt(cursor.getColumnIndex(SoulissDBOpenHelper.COLUMN_TAG_ORDER)));
        dto.setIconResourceId(cursor.getInt(cursor.getColumnIndex(SoulissDBOpenHelper.COLUMN_TAG_ICONID)));
        dto.setImagePath(cursor.getString(cursor.getColumnIndex(SoulissDBOpenHelper.COLUMN_TAG_IMGPTH)));
        dto.setFatherId(null);
        Log.d(Constants.TAG, "retrieving ROOT TAG:" + dto.getTagId() + " ORDER:" + dto.getTagOrder());
        // populate devices and recurse into children
        dto.setAssignedTypicals(getTagTypicals(dto));
        dto.setChildTags(getTagChild(dto));
        comments.add(dto);
        cursor.moveToNext();
    }
    cursor.close();
    return comments;
}
/**
 * Loads a single scene by id, including its ordered command list.
 *
 * @param sceneId primary key of the scene to load
 * @return the scene with name, icon and commands populated
 */
public SoulissScene getScene(int sceneId) {
    Cursor c = database.query(SoulissDBOpenHelper.TABLE_SCENES, SoulissDBOpenHelper.ALLCOLUMNS_SCENES, SoulissDBOpenHelper.COLUMN_SCENE_ID + " =" + sceneId, null, null, null,
            SoulissDBOpenHelper.COLUMN_SCENE_ID);
    c.moveToFirst();
    SoulissScene scene = new SoulissScene(c.getInt(c.getColumnIndex(SoulissDBOpenHelper.COLUMN_SCENE_ID)));
    scene.setName(c.getString(c.getColumnIndex(SoulissDBOpenHelper.COLUMN_SCENE_NAME)));
    scene.setIconResourceId(c.getInt(c.getColumnIndex(SoulissDBOpenHelper.COLUMN_SCENE_ICON)));
    c.close();
    // commands are fetched with their own query/cursor
    scene.setCommandArray(getSceneCommands(scene.getId()));
    return scene;
}
/**
 * Loads the commands belonging to a scene, ordered by scheduled time
 * (for scenes the scheduled time acts as the step index).
 *
 * @param sceneId scene whose commands are loaded
 * @return ordered command list; massive commands get a fake target typical
 */
public ArrayList<SoulissCommand> getSceneCommands(int sceneId) {
    ArrayList<SoulissCommand> ret = new ArrayList<>();
    Cursor cursor = database.query(SoulissDBOpenHelper.TABLE_COMMANDS, SoulissDBOpenHelper.ALLCOLUMNS_COMMANDS,
            SoulissDBOpenHelper.COLUMN_COMMAND_SCENEID + " =" + sceneId, null, null, null,
            SoulissDBOpenHelper.COLUMN_COMMAND_SCHEDTIME); // for a scene this is the step order
    cursor.moveToFirst();
    while (!cursor.isAfterLast()) {
        SoulissCommandDTO comment = new SoulissCommandDTO(cursor);
        // DTO captures the current row, so advancing the cursor here is safe
        cursor.moveToNext();
        short node = comment.getNodeId();
        short slot = comment.getSlot();
        SoulissCommand adding;
        if (node > Constants.MASSIVE_NODE_ID) {
            // regular command: resolve the concrete typical at (node, slot)
            SoulissTypical tgt = getTypical(node, slot);
            tgt.getTypicalDTO().setNodeId(node);
            tgt.getTypicalDTO().setSlot(slot);
            adding = new SoulissCommand(context, comment, tgt);
        } else {
            // massive command: build a placeholder typical on a fake massive node
            SoulissNode fake = new SoulissNode(context, Constants.MASSIVE_NODE_ID);
            SoulissTypical tgt = new SoulissTypical(context, opts);
            tgt.setParentNode(fake);
            tgt.getTypicalDTO().setNodeId(Constants.MASSIVE_NODE_ID);
            // for massive commands the SLOT field encodes the TYPICAL code
            tgt.getTypicalDTO().setTypical(slot);
            adding = new SoulissCommand(context, comment, tgt);
        }
        ret.add(adding);
    }
    // Make sure to close the cursor
    cursor.close();
    return ret;
}
/**
 * Loads every scene, ordered by id, each with its command list populated.
 *
 * @return all persisted scenes
 */
public LinkedList<SoulissScene> getScenes() {
    LinkedList<SoulissScene> scenes = new LinkedList<>();
    Cursor c = database.query(SoulissDBOpenHelper.TABLE_SCENES, SoulissDBOpenHelper.ALLCOLUMNS_SCENES, null, null, null, null,
            SoulissDBOpenHelper.COLUMN_SCENE_ID);
    for (c.moveToFirst(); !c.isAfterLast(); c.moveToNext()) {
        SoulissScene scene = new SoulissScene(c.getInt(c.getColumnIndex(SoulissDBOpenHelper.COLUMN_SCENE_ID)));
        scene.setName(c.getString(c.getColumnIndex(SoulissDBOpenHelper.COLUMN_SCENE_NAME)));
        scene.setIconResourceId(c.getInt(c.getColumnIndex(SoulissDBOpenHelper.COLUMN_SCENE_ICON)));
        // commands come from a separate query on the commands table
        scene.setCommandArray(getSceneCommands(scene.getId()));
        scenes.add(scene);
    }
    c.close();
    return scenes;
}
/**
 * Returns the on-disk size of the database file, in bytes.
 *
 * @return file length, or 0 if the file does not exist
 */
public long getSize() {
    File dbFile = new File(database.getPath());
    return dbFile.length();
}
/**
 * Loads one node by id and attaches every typical persisted for it.
 *
 * @param nodeIN node id to load
 * @return the node with its typicals attached
 */
public SoulissNode getSoulissNode(int nodeIN) {
    Cursor c = database.query(SoulissDBOpenHelper.TABLE_NODES, SoulissDBOpenHelper.ALLCOLUMNS_NODES, SoulissDBOpenHelper.COLUMN_NODE_ID
            + " = " + nodeIN, null, null, null, null);
    c.moveToFirst();
    SoulissNode node = SoulissNode.cursorToNode(context, c);
    c.close();
    // attach every typical persisted for this node (separate query/cursor)
    for (SoulissTypical typical : getNodeTypicals(node)) {
        node.addTypical(typical);
    }
    return node;
}
/**
 * Loads a single trigger row by its primary key.
 *
 * @param insertId trigger id (e.g. the row id returned by an insert)
 * @return the trigger DTO built from the first matching row
 */
public SoulissTriggerDTO getSoulissTrigger(long insertId) {
    Cursor c = database.query(SoulissDBOpenHelper.TABLE_TRIGGERS, SoulissDBOpenHelper.ALLCOLUMNS_TRIGGERS,
            SoulissDBOpenHelper.COLUMN_TRIGGER_ID + " = " + insertId, null, null, null, null);
    c.moveToFirst();
    SoulissTriggerDTO trigger = new SoulissTriggerDTO(c);
    c.close();
    return trigger;
}
/**
 * Loads a tag by id, fully populated with its assigned typicals and child tags.
 *
 * @param tagId tag primary key
 * @return the populated tag
 * @throws SQLDataException if no tag with the given id exists
 */
public SoulissTag getTag(long tagId) throws SQLDataException {
    SoulissTag dto = new SoulissTag();
    if (!database.isOpen())
        open();
    Cursor cursor = database.query(SoulissDBOpenHelper.TABLE_TAGS, SoulissDBOpenHelper.ALLCOLUMNS_TAGS,
            SoulissDBOpenHelper.COLUMN_TAG_ID + " = " + tagId, null, null, null, null);
    // FIX: the original tested cursor.isLast() before positioning, which is false for
    // an empty cursor, so a missing tag was never reported. Use the row count instead,
    // and close the cursor before throwing to avoid leaking it.
    if (cursor.getCount() == 0) {
        cursor.close();
        throw new SQLDataException("Non Existing TagId:" + tagId);
    }
    cursor.moveToFirst();
    while (!cursor.isAfterLast()) {
        dto.setTagOrder(cursor.getInt(cursor.getColumnIndex(SoulissDBOpenHelper.COLUMN_TAG_ORDER)));
        dto.setTagId(cursor.getInt(cursor.getColumnIndex(SoulissDBOpenHelper.COLUMN_TAG_ID)));
        dto.setName(cursor.getString(cursor.getColumnIndex(SoulissDBOpenHelper.COLUMN_TAG_NAME)));
        dto.setIconResourceId(cursor.getInt(cursor.getColumnIndex(SoulissDBOpenHelper.COLUMN_TAG_ICONID)));
        dto.setImagePath(cursor.getString(cursor.getColumnIndex(SoulissDBOpenHelper.COLUMN_TAG_IMGPTH)));
        // father id is nullable: only read it when present
        Long l = null;
        if (!cursor.isNull(cursor.getColumnIndex(SoulissDBOpenHelper.COLUMN_TAG_FATHER_ID)))
            l = cursor.getLong(cursor.getColumnIndex(SoulissDBOpenHelper.COLUMN_TAG_FATHER_ID));
        dto.setFatherId(l);
        List<SoulissTypical> devs = getTagTypicals(dto);
        dto.setAssignedTypicals(devs);
        dto.setChildTags(getTagChild(dto));
        // FIX: the original logged the typicals count twice; "Childs" now reports child tags
        Log.i(Constants.TAG, "retrieved TAG:" + dto.getTagId() + " ORDER:" + dto.getTagOrder() + " Father:" + dto.getFatherId() + " Devices:" + dto.getAssignedTypicals().size() + " Childs:" + dto.getChildTags().size());
        cursor.moveToNext();
    }
    cursor.close();
    return dto;
}
/**
 * Loads the direct children of a tag, each recursively populated with its own
 * typicals and children.
 *
 * @param fatherDto parent tag
 * @return child tags ordered by tag order; empty list when there are none
 */
protected List<SoulissTag> getTagChild(SoulissTag fatherDto) {
    List<SoulissTag> ret = new ArrayList<>();
    if (!database.isOpen())
        open();
    Cursor cursor = database.query(SoulissDBOpenHelper.TABLE_TAGS, SoulissDBOpenHelper.ALLCOLUMNS_TAGS,
            SoulissDBOpenHelper.COLUMN_TAG_FATHER_ID + " = " + fatherDto.getTagId(), null, null, null, SoulissDBOpenHelper.COLUMN_TAG_ORDER);
    // FIX: the original tested cursor.isLast() before positioning, which is false for
    // an empty cursor, so the early return was dead code; test the row count instead.
    if (cursor.getCount() == 0) {
        cursor.close();
        return ret; // no children
    }
    cursor.moveToFirst();
    while (!cursor.isAfterLast()) {
        SoulissTag dtoI = new SoulissTag();
        dtoI.setTagOrder(cursor.getInt(cursor.getColumnIndex(SoulissDBOpenHelper.COLUMN_TAG_ORDER)));
        dtoI.setTagId(cursor.getInt(cursor.getColumnIndex(SoulissDBOpenHelper.COLUMN_TAG_ID)));
        dtoI.setName(cursor.getString(cursor.getColumnIndex(SoulissDBOpenHelper.COLUMN_TAG_NAME)));
        dtoI.setIconResourceId(cursor.getInt(cursor.getColumnIndex(SoulissDBOpenHelper.COLUMN_TAG_ICONID)));
        dtoI.setImagePath(cursor.getString(cursor.getColumnIndex(SoulissDBOpenHelper.COLUMN_TAG_IMGPTH)));
        dtoI.setFatherId(fatherDto.getTagId());
        Log.d(Constants.TAG, "retrieving TAG CHILD OF:" + fatherDto.getTagId() + " CHILD ID: " + dtoI.getTagId());
        dtoI.setAssignedTypicals(getTagTypicals(dtoI));
        dtoI.setChildTags(getTagChild(dtoI)); // recursive descent into grandchildren
        ret.add(dtoI);
        cursor.moveToNext();
    }
    cursor.close();
    return ret;
}
/**
 * Loads the typicals assigned to a tag, joining the tag-typical association
 * table with the typicals table, ordered by assignment priority.
 *
 * @param parent tag whose typicals are loaded
 * @return typicals marked as tagged (and as favourite when parent is tag 0)
 */
public List<SoulissTypical> getTagTypicals(SoulissTag parent) {
    List<SoulissTypical> comments = new ArrayList<>();
    String MY_QUERY = "SELECT * FROM " + SoulissDBOpenHelper.TABLE_TAGS_TYPICALS + " a "
            + " INNER JOIN " + SoulissDBOpenHelper.TABLE_TYPICALS + " b "
            + " ON a." + SoulissDBOpenHelper.COLUMN_TAG_TYP_NODE_ID + " = b." + SoulissDBOpenHelper.COLUMN_TYPICAL_NODE_ID
            + " AND a." + SoulissDBOpenHelper.COLUMN_TAG_TYP_SLOT + " = b." + SoulissDBOpenHelper.COLUMN_TYPICAL_SLOT
            + " WHERE a." + SoulissDBOpenHelper.COLUMN_TAG_TYP_TAG_ID + " = " + parent.getTagId()
            + " ORDER BY a." + SoulissDBOpenHelper.COLUMN_TAG_TYP_PRIORITY;
    Cursor cursor = database.rawQuery(MY_QUERY, null);
    cursor.moveToFirst();
    while (!cursor.isAfterLast()) {
        SoulissTypicalDTO dto = new SoulissTypicalDTO(cursor);
        SoulissNode par = getSoulissNode(dto.getNodeId());
        SoulissTypical newTyp = SoulissTypicalFactory.getTypical(context, dto.getTypical(), par, dto, opts);
        // hack dto ID, could be different if parent is massive
        newTyp.getTypicalDTO().setNodeId(dto.getNodeId());
        // if it is here, it is tagged; tag id 0 is the favourites pseudo-tag
        if (parent.getTagId() == 0)
            newTyp.getTypicalDTO().setFavourite(true);
        newTyp.getTypicalDTO().setTagged(true);
        comments.add(newTyp);
        cursor.moveToNext();
    }
    // Make sure to close the cursor
    cursor.close();
    return comments;
}
/**
 * Loads the trigger associated with a command id, joining triggers with
 * their commands.
 *
 * @param triggerId command id the trigger references
 * @return the trigger DTO built from the first matching row
 */
public SoulissTriggerDTO getTriggerByCommandId(long triggerId) {
    String MY_QUERY = "SELECT * FROM " + SoulissDBOpenHelper.TABLE_TRIGGERS + " a " + "INNER JOIN "
            + SoulissDBOpenHelper.TABLE_COMMANDS + " b ON a." + SoulissDBOpenHelper.COLUMN_TRIGGER_COMMAND_ID + " = b."
            + SoulissDBOpenHelper.COLUMN_COMMAND_ID + " WHERE " + SoulissDBOpenHelper.COLUMN_TRIGGER_COMMAND_ID + " = " + triggerId;
    Cursor cursor = database.rawQuery(MY_QUERY, null);
    cursor.moveToFirst();
    // FIX: the original asserted getColumnCount() == 1, which is always false for a
    // SELECT * join and made every DEBUG run throw; the intent was one matching ROW.
    if (BuildConfig.DEBUG && !(cursor.getCount() == 1))
        throw new RuntimeException("cursor.getCount() != 1");
    SoulissTriggerDTO comment = new SoulissTriggerDTO(cursor);
    // Make sure to close the cursor
    cursor.close();
    return comment;
}
/**
 * Returns a map of all triggers, indexed by their command id.
 *
 * @return sparse array keyed by (int) command id
 */
public SparseArray<SoulissTriggerDTO> getTriggerMap() {
    SparseArray<SoulissTriggerDTO> triggers = new SparseArray<>();
    Cursor c = database.query(SoulissDBOpenHelper.TABLE_TRIGGERS, SoulissDBOpenHelper.ALLCOLUMNS_TRIGGERS, null, null, null, null,
            null);
    for (c.moveToFirst(); !c.isAfterLast(); c.moveToNext()) {
        SoulissTriggerDTO dto = new SoulissTriggerDTO(c);
        triggers.put((int) dto.getCommandId(), dto);
    }
    c.close();
    return triggers;
}
/**
 * DB typical factory: loads one typical by its primary key (node id + slot),
 * including its favourite/tagged flags from the tag-typical table.
 *
 * @param node node id
 * @param slot slot within the node
 * @return produced Typical
 */
public SoulissTypical getTypical(int node, short slot) {
    // query with primary key
    Cursor cursor = database.query(SoulissDBOpenHelper.TABLE_TYPICALS, SoulissDBOpenHelper.ALLCOLUMNS_TYPICALS,
            SoulissDBOpenHelper.COLUMN_TYPICAL_NODE_ID + " = " + node + " AND " + SoulissDBOpenHelper.COLUMN_TYPICAL_SLOT + " = "
                    + slot, null, null, null, null);
    cursor.moveToFirst();
    SoulissTypicalDTO dto = new SoulissTypicalDTO(cursor);
    // Tags are a 1-to-n relation, so they are read with a separate query, not a join
    Cursor typTags = database.query(SoulissDBOpenHelper.TABLE_TAGS_TYPICALS, SoulissDBOpenHelper.ALLCOLUMNS_TAGS_TYPICAL,
            SoulissDBOpenHelper.COLUMN_TAG_TYP_NODE_ID + " = " + dto.getNodeId()
                    + " AND " + SoulissDBOpenHelper.COLUMN_TAG_TYP_SLOT + " = " + dto.getSlot(),
            null, null, null, null);
    typTags.moveToFirst();
    while (!typTags.isAfterLast()) {
        int tagId = typTags.getInt(typTags.getColumnIndex(SoulissDBOpenHelper.COLUMN_TAG_TYP_TAG_ID));
        // the favourites pseudo-tag marks a favourite; any other tag marks "tagged"
        if (tagId == SoulissDBOpenHelper.FAVOURITES_TAG_ID)
            dto.setFavourite(true);
        else
            dto.setTagged(true);
        typTags.moveToNext();
    }
    typTags.close();
    SoulissTypical ret = SoulissTypicalFactory.getTypical(context, dto.getTypical(), getSoulissNode(node), dto, opts);
    cursor.close();
    return ret;
}
/**
 * Loads every log row for the given typical, oldest first.
 *
 * @param tgt typical whose node id and slot select the log rows
 * @return log DTOs ordered by log date ascending
 */
public ArrayList<SoulissLogDTO> getTypicalLogs(SoulissTypical tgt) {
    ArrayList<SoulissLogDTO> logs = new ArrayList<>();
    String where = SoulissDBOpenHelper.COLUMN_LOG_NODE_ID
            + " = " + tgt.getNodeId() + " AND " + SoulissDBOpenHelper.COLUMN_LOG_SLOT + " = "
            + tgt.getSlot();
    Cursor c = database.query(SoulissDBOpenHelper.TABLE_LOGS, SoulissDBOpenHelper.ALLCOLUMNS_LOGS, where,
            null, null, null, SoulissDBOpenHelper.COLUMN_LOG_DATE + " ASC");
    for (c.moveToFirst(); !c.isAfterLast(); c.moveToNext()) {
        logs.add(new SoulissLogDTO(c));
    }
    c.close();
    return logs;
}
/**
 * TO TEST — builds clock-pie slices covering the intervals in which the typical
 * was ON, derived from its logged history.
 *
 * @param tgt   typical whose history is scanned
 * @param range time range filter passed to getHistoryTypicalHashMap
 * @return one ClockPieHelper per ON interval longer than one minute
 */
public ArrayList<ClockPieHelper> getTypicalOnClockPie(SoulissTypical tgt, TimeRangeEnum range) {
    ArrayList<ClockPieHelper> clockPieHelperArrayList = new ArrayList<>();
    LinkedHashMap<Date, Short> comments = getHistoryTypicalHashMap(tgt, range);
    boolean firstGo = true;
    Date accStart = new Date();
    // Iterate entries (LinkedHashMap preserves insertion = chronological order)
    // instead of keySet + get(): one map lookup per sample instead of two.
    for (java.util.Map.Entry<Date, Short> sample : comments.entrySet()) {
        Date cur = sample.getKey();
        Short val = sample.getValue();
        // NOTE(review): assumes the Off constants are primitive shorts so != unboxes
        // numerically rather than comparing references — confirm in Constants.Typicals.
        if (val != Constants.Typicals.Souliss_T1n_OffCoil && val != Constants.Typicals.Souliss_T1n_OffCoil_Auto) {
            // ON sample: (re)start the open interval here
            accStart = cur;
            firstGo = false;
        } else if (!firstGo) {
            // OFF sample closing an open interval
            Calendar start = Calendar.getInstance();
            Calendar stop = Calendar.getInstance();
            start.setTime(accStart);
            stop.setTime(cur);
            // add a slice only if the interval spans more than one minute
            if (!(start.get(Calendar.HOUR_OF_DAY) == stop.get(Calendar.HOUR_OF_DAY) &&
                    (start.get(Calendar.MINUTE) == stop.get(Calendar.MINUTE)))) {
                Log.d(Constants.TAG, "Aggiungo fetta dalle " + start.get(Calendar.HOUR_OF_DAY) + ":" + start.get(Calendar.MINUTE)
                        + " alle " + stop.get(Calendar.HOUR_OF_DAY) + ":" + stop.get(Calendar.MINUTE));
                clockPieHelperArrayList.add(new ClockPieHelper(start.get(Calendar.HOUR_OF_DAY), start.get(Calendar.MINUTE),
                        stop.get(Calendar.HOUR_OF_DAY), stop.get(Calendar.MINUTE)));
            }
            firstGo = true;
        }
    }
    return clockPieHelperArrayList;
}
/**
 * TO TEST — total time, in milliseconds, the typical spent ON within the range,
 * summed over the ON→OFF intervals found in its logged history.
 *
 * @param tgt   typical whose history is scanned
 * @param range time range filter passed to getHistoryTypicalHashMap
 * @return accumulated ON duration in msec (int, as per existing callers)
 */
public int getTypicalOnDurationMsec(SoulissTypical tgt, TimeRangeEnum range) {
    LinkedHashMap<Date, Short> comments = getHistoryTypicalHashMap(tgt, range);
    int accumulator = 0;
    boolean firstGo = true;
    Date accStart = new Date();
    // Iterate entries (LinkedHashMap preserves insertion = chronological order)
    // instead of keySet + get(): one map lookup per sample instead of two.
    for (java.util.Map.Entry<Date, Short> sample : comments.entrySet()) {
        Date cur = sample.getKey();
        Short val = sample.getValue();
        // NOTE(review): assumes the Off constants are primitive shorts so != unboxes
        // numerically rather than comparing references — confirm in Constants.Typicals.
        if (val != Constants.Typicals.Souliss_T1n_OffCoil && val != Constants.Typicals.Souliss_T1n_OffCoil_Auto) {
            // ON sample: (re)start the open interval here
            accStart = cur;
            firstGo = false;
        } else if (!firstGo) {
            // OFF sample closing an open interval: add its duration
            accumulator += cur.getTime() - accStart.getTime();
            firstGo = true;
        }
    }
    return accumulator;
}
/**
 * Loads all commands still to be executed: never-executed ones plus the
 * always-reevaluated COMEBACK/GOAWAY/TRIGGERED types, excluding scene steps.
 *
 * @param context Android context passed through to the built commands
 * @return commands ordered by scheduled time
 */
public LinkedList<SoulissCommand> getUnexecutedCommands(Context context) {
    LinkedList<SoulissCommand> ret = new LinkedList<>();
    Cursor cursor = database.query(SoulissDBOpenHelper.TABLE_COMMANDS, SoulissDBOpenHelper.ALLCOLUMNS_COMMANDS, " ("
            + SoulissDBOpenHelper.COLUMN_COMMAND_EXECTIME + " is null OR " + SoulissDBOpenHelper.COLUMN_COMMAND_TYPE + " ="
            + Constants.COMMAND_COMEBACK_CODE + " OR " + SoulissDBOpenHelper.COLUMN_COMMAND_TYPE + " ="
            + Constants.COMMAND_GOAWAY_CODE + " OR " + SoulissDBOpenHelper.COLUMN_COMMAND_TYPE + " ="
            + Constants.COMMAND_TRIGGERED + ") AND " + SoulissDBOpenHelper.COLUMN_COMMAND_SCENEID + " IS NULL", null, null,
            null, SoulissDBOpenHelper.COLUMN_COMMAND_SCHEDTIME);
    cursor.moveToFirst();
    Log.d(Constants.TAG, "Found unexecuted commands:" + cursor.getCount());
    while (!cursor.isAfterLast()) {
        SoulissCommandDTO comment = new SoulissCommandDTO(cursor);
        Log.d(Constants.TAG, "Found command last exec at:" + comment.getExecutedTime());
        // DTO captures the current row, so advancing the cursor here is safe
        cursor.moveToNext();
        short node = comment.getNodeId();
        short slot = comment.getSlot();
        SoulissCommand adding = null;
        if (node > Constants.MASSIVE_NODE_ID) {
            // regular command: resolve the concrete typical at (node, slot)
            SoulissTypical tgt = getTypical(node, slot);
            // hack nodeId, Massive
            tgt.getTypicalDTO().setNodeId(node);
            tgt.getTypicalDTO().setSlot(slot);
            adding = new SoulissCommand(context, comment, tgt);
        } else if (node > Constants.COMMAND_FAKE_SCENE) {
            // massive command: target the unique typical whose code matches slot
            SoulissNode minchia = new SoulissNode(context, Constants.MASSIVE_NODE_ID);
            List<SoulissTypical> massivi = getUniqueTypicals(minchia);
            Log.d(Constants.TAG, "Massive command found, Typical:" + slot);
            for (SoulissTypical cazzuto : massivi) {
                if (slot == cazzuto.getTypicalDTO().getTypical()) {
                    adding = new SoulissCommand(context, comment, cazzuto);
                }
            }
        } else {
            // scene command: the scene id to execute is encoded in slot
            adding = new SoulissCommand(context, comment);
        }
        // NOTE(review): assertNotNull here is a test-style assertion on a production
        // code path; a massive command with no matching typical would trip it.
        assertNotNull(adding);
        ret.add(adding);
    }
    cursor.close();
    return ret;
}
/**
 * Loads one typical per distinct typical code across the whole DB, used to
 * build targets for massive commands.
 *
 * @param parent fake node, id -1 (the massive pseudo-node)
 * @return first-seen typical for each distinct typical code
 */
public List<SoulissTypical> getUniqueTypicals(SoulissNode parent) {
    ArrayList<SoulissTypical> comments = new ArrayList<>();
    // typical codes already emitted, to keep only the first occurrence of each
    HashSet<Short> pool = new HashSet<>();
    Cursor cursor = database.query(SoulissDBOpenHelper.TABLE_TYPICALS, SoulissDBOpenHelper.ALLCOLUMNS_TYPICALS, null, null, null, null,
            null);
    cursor.moveToFirst();
    while (!cursor.isAfterLast()) {
        SoulissTypicalDTO dto = new SoulissTypicalDTO(cursor);
        SoulissTypical newTyp = SoulissTypicalFactory.getTypical(context, dto.getTypical(), parent, dto, opts);
        newTyp.setParentNode(parent);
        if (parent.getNodeId() == Constants.MASSIVE_NODE_ID) {
            // hack dto ID, could be different if parent is massive;
            // for massive targets the SLOT field carries the typical code
            newTyp.getTypicalDTO().setNodeId(parent.getNodeId());
            newTyp.getTypicalDTO().setSlot(dto.getTypical());
        }
        if (!pool.contains(dto.getTypical())) {
            comments.add(newTyp);
            pool.add(dto.getTypical());
        }
        cursor.moveToNext();
    }
    // Make sure to close the cursor
    cursor.close();
    return comments;
}
/**
 * Assumes the node already exists (assertion error otherwise). Light update of
 * last-modified date and health only, meant for the JSON refresh path.
 *
 * @param nodeIN node whose row is refreshed
 * @return number of rows updated (asserted to be exactly 1)
 */
@Deprecated
public int refreshNode(SoulissNode nodeIN) {
    ContentValues values = new ContentValues();
    // wrap values from object
    values.put(SoulissDBOpenHelper.COLUMN_NODE_LASTMOD, Calendar.getInstance().getTime().getTime());
    values.put(SoulissDBOpenHelper.COLUMN_NODE_HEALTH, nodeIN.getHealth());
    long upd = database.update(SoulissDBOpenHelper.TABLE_NODES, values, SoulissDBOpenHelper.COLUMN_NODE_ID + " = " + nodeIN.getNodeId(),
            null);
    // NOTE(review): test-style assertion on a production code path
    assertEquals(upd, 1);
    return (int) upd;
}
/**
 * Assumes the node already exists (assertion error otherwise). Light update of
 * last-modified date and health, meant for the JSON refresh path; additionally
 * refreshes each of the node's typicals, skipping empty ones.
 *
 * @param nodeIN node whose row and typicals are refreshed
 * @return number of node rows updated (asserted to be exactly 1)
 */
@Deprecated
public int refreshNodeAndTypicals(SoulissNode nodeIN) {
    ContentValues values = new ContentValues();
    // wrap values from object
    values.put(SoulissDBOpenHelper.COLUMN_NODE_LASTMOD, Calendar.getInstance().getTime().getTime());
    values.put(SoulissDBOpenHelper.COLUMN_NODE_HEALTH, nodeIN.getHealth());
    long upd = database.update(SoulissDBOpenHelper.TABLE_NODES, values, SoulissDBOpenHelper.COLUMN_NODE_ID + " = " + nodeIN.getNodeId(),
            null);
    // NOTE(review): test-style assertion on a production code path
    assertEquals(upd, 1);
    List<SoulissTypical> tips = nodeIN.getTypicals();
    for (SoulissTypical x : tips) {
        // skip refresh for empty typicals
        if (!x.isEmpty())
            x.refresh();
    }
    return (int) upd;
}
/**
 * Drops and recreates the whole schema, wiping every table.
 * No-op when the open helper is not available.
 */
public void truncateAll() {
    if (soulissDatabase == null) {
        return;
    }
    Log.w(TAG, "DB dropCreate !!!");
    soulissDatabase.dropCreate(database);
}
/**
 * Clears the tables repopulated by an import: logs, typicals and nodes,
 * deleted in that order.
 *
 * @return total number of rows deleted across the three tables
 */
public int truncateImportTables() {
    // operands evaluate left-to-right, preserving the logs → typicals → nodes order
    return database.delete(SoulissDBOpenHelper.TABLE_LOGS, null, null)
            + database.delete(SoulissDBOpenHelper.TABLE_TYPICALS, null, null)
            + database.delete(SoulissDBOpenHelper.TABLE_NODES, null, null);
}
}
| |
package org.peercentrum.core;
import static org.junit.Assert.assertEquals;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.atomic.AtomicInteger;
import org.junit.Test;
import org.peercentrum.TransientMockNetworkOfNodes;
import org.peercentrum.application.BaseApplicationMessageHandler;
import org.peercentrum.core.PB.HeaderMsg;
import org.peercentrum.network.HeaderAndPayload;
import org.peercentrum.network.NetworkClientTCPConnection;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.ChannelHandlerContext;
import io.netty.util.ResourceLeakDetector;
import io.netty.util.ResourceLeakDetector.Level;
import io.netty.util.concurrent.Future;
/**
 * Integration test: NB_CLIENTS concurrent client threads each send
 * NUMBER_OF_MESSAGE echo requests over a shared connection; the server-side
 * handler counts every message it receives.
 */
public class AsyncSocketServerTest {
    final int NB_CLIENTS = 10;
    final int NUMBER_OF_MESSAGE = 2;

    /** Echo application: replies with the request payload and counts received messages. */
    static class MessageEchoApp extends BaseApplicationMessageHandler {
        public static final ApplicationIdentifier ECHO_APP = new ApplicationIdentifier("EchoApp".getBytes());
        CountDownLatch countdownLatch;
        public AtomicInteger numberOfMessagesReceived = new AtomicInteger();

        /**
         * @param serverMain      server this handler is registered on
         * @param serverSideLatch counted down once per received message; may be null
         */
        public MessageEchoApp(ServerMain serverMain, CountDownLatch serverSideLatch) {
            super(serverMain);
            countdownLatch = serverSideLatch;
        }

        @Override
        public ApplicationIdentifier getApplicationId() {
            return ECHO_APP;
        }

        @Override
        public HeaderAndPayload generateReponseFromQuery(ChannelHandlerContext ctx, HeaderAndPayload receivedRequest) {
            if (countdownLatch != null) {
                countdownLatch.countDown();
            }
            numberOfMessagesReceived.incrementAndGet();
            // echo: reuse the request payload in the response
            HeaderMsg.Builder headerBuilder = newResponseHeaderForRequest(receivedRequest);
            return new HeaderAndPayload(headerBuilder, receivedRequest.payload);
        }
    }

    @Test
    public void testAsyncSocketServer() throws Exception {
        ResourceLeakDetector.setLevel(Level.ADVANCED);
        TransientMockNetworkOfNodes mockNetworkOfNodes = new TransientMockNetworkOfNodes();
        final CountDownLatch serverDoneBarrier = new CountDownLatch(NB_CLIENTS * NUMBER_OF_MESSAGE);
        MessageEchoApp serverSideCountingHandler = new MessageEchoApp(mockNetworkOfNodes.server1, serverDoneBarrier);
        final CountDownLatch clientsDoneBarrier = new CountDownLatch(NB_CLIENTS);
        for (int i = 0; i < NB_CLIENTS; i++) {
            new Thread() {
                @Override
                public void run() {
                    try {
                        doNettyClientWrite(mockNetworkOfNodes.client1ToServer1Connection);
                    } catch (Exception e) {
                        e.printStackTrace();
                    } finally {
                        // FIX: count down even when the client fails, otherwise the
                        // await() below would hang the test forever instead of failing.
                        clientsDoneBarrier.countDown();
                    }
                }
            }.start();
        }
        clientsDoneBarrier.await();
        mockNetworkOfNodes.client1ToServer1Connection.close();
        serverDoneBarrier.await();
        mockNetworkOfNodes.server1.networkServer.stopAcceptingConnections();
        assertEquals(NB_CLIENTS * NUMBER_OF_MESSAGE, serverSideCountingHandler.numberOfMessagesReceived.intValue());
    }

    /**
     * Sends two echo requests on the shared connection and asserts each response
     * matches the payload that was sent.
     */
    private void doNettyClientWrite(NetworkClientTCPConnection connection) throws InterruptedException, ExecutionException {
        ByteBuf helloWorldBuffer = Unpooled.wrappedBuffer("Hello world".getBytes());
        Future<ByteBuf> helloWorldResponse = connection.sendRequestBytes(MessageEchoApp.ECHO_APP, helloWorldBuffer);
        ByteBuf bonjourBuffer = Unpooled.wrappedBuffer("Bonjour le monde".getBytes());
        Future<ByteBuf> bonjourResponse = connection.sendRequestBytes(MessageEchoApp.ECHO_APP, bonjourBuffer);
        assertEquals(bonjourBuffer, bonjourResponse.get());
        assertEquals(helloWorldBuffer, helloWorldResponse.get());
    }

    /** Convenience entry point to run the test outside a JUnit runner. */
    public static void main(String[] args) throws Exception {
        new AsyncSocketServerTest().testAsyncSocketServer();
    }
}
| |
/*-
* * Copyright 2016 Skymind, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
*/
package org.datavec.api.records.reader.impl;
import org.datavec.api.io.labels.PathLabelGenerator;
import org.datavec.api.records.Record;
import org.datavec.api.records.metadata.RecordMetaData;
import org.datavec.api.records.reader.RecordReader;
import org.datavec.api.records.reader.impl.jackson.FieldSelection;
import org.datavec.api.records.reader.impl.jackson.JacksonRecordReader;
import org.datavec.api.split.InputSplit;
import org.datavec.api.split.NumberedFileInputSplit;
import org.datavec.api.util.ClassPathResource;
import org.datavec.api.writable.IntWritable;
import org.datavec.api.writable.Text;
import org.datavec.api.writable.Writable;
import org.junit.Test;
import org.nd4j.shade.jackson.core.JsonFactory;
import org.nd4j.shade.jackson.databind.ObjectMapper;
import org.nd4j.shade.jackson.dataformat.xml.XmlFactory;
import org.nd4j.shade.jackson.dataformat.yaml.YAMLFactory;
import java.net.URI;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
/**
* Created by Alex on 11/04/2016.
*/
public class JacksonRecordReaderTest {

    @Test
    public void testReadingJson() throws Exception {
        //Load 3 values from 3 JSON files
        //structure: a:value, b:value, c:x:value, c:y:value
        //And we want to load only a:value, b:value and c:x:value
        //For first JSON file: all values are present
        //For second JSON file: b:value is missing
        //For third JSON file: c:x:value is missing
        ClassPathResource cpr = new ClassPathResource("json/json_test_0.txt");
        // Derive a printf-style template from the numbered resource file.
        // NOTE(review): replace("0", "%d") rewrites EVERY '0' in the absolute path,
        // so this breaks if any parent directory name contains a '0' — verify on CI paths.
        String path = cpr.getFile().getAbsolutePath().replace("0", "%d");
        InputSplit is = new NumberedFileInputSplit(path, 0, 2);
        RecordReader rr = new JacksonRecordReader(getFieldSelection(), new ObjectMapper(new JsonFactory()));
        rr.initialize(is);
        testJacksonRecordReader(rr);
    }

    @Test
    public void testReadingYaml() throws Exception {
        //Exact same information as JSON format, but in YAML format
        ClassPathResource cpr = new ClassPathResource("yaml/yaml_test_0.txt");
        String path = cpr.getFile().getAbsolutePath().replace("0", "%d");
        InputSplit is = new NumberedFileInputSplit(path, 0, 2);
        RecordReader rr = new JacksonRecordReader(getFieldSelection(), new ObjectMapper(new YAMLFactory()));
        rr.initialize(is);
        testJacksonRecordReader(rr);
    }

    @Test
    public void testReadingXml() throws Exception {
        //Exact same information as JSON format, but in XML format
        ClassPathResource cpr = new ClassPathResource("xml/xml_test_0.txt");
        String path = cpr.getFile().getAbsolutePath().replace("0", "%d");
        InputSplit is = new NumberedFileInputSplit(path, 0, 2);
        RecordReader rr = new JacksonRecordReader(getFieldSelection(), new ObjectMapper(new XmlFactory()));
        rr.initialize(is);
        testJacksonRecordReader(rr);
    }

    /**
     * Field selection shared by all tests: read "a" (required), "b" and "c/x"
     * (each with a placeholder Text value substituted when the field is absent).
     */
    private static FieldSelection getFieldSelection() {
        return new FieldSelection.Builder().addField("a").addField(new Text("MISSING_B"), "b")
                        .addField(new Text("MISSING_CX"), "c", "x").build();
    }

    /**
     * Shared assertions: reads the three records, checks the expected values
     * (including the MISSING_* placeholders), then resets the reader and
     * verifies the same sequence is produced again.
     */
    private static void testJacksonRecordReader(RecordReader rr) {
        List<Writable> json0 = rr.next();
        List<Writable> exp0 = Arrays.asList((Writable) new Text("aValue0"), new Text("bValue0"), new Text("cxValue0"));
        assertEquals(exp0, json0);

        List<Writable> json1 = rr.next();
        List<Writable> exp1 =
                        Arrays.asList((Writable) new Text("aValue1"), new Text("MISSING_B"), new Text("cxValue1"));
        assertEquals(exp1, json1);

        List<Writable> json2 = rr.next();
        List<Writable> exp2 =
                        Arrays.asList((Writable) new Text("aValue2"), new Text("bValue2"), new Text("MISSING_CX"));
        assertEquals(exp2, json2);

        assertFalse(rr.hasNext());

        //Test reset
        rr.reset();
        assertEquals(exp0, rr.next());
        assertEquals(exp1, rr.next());
        assertEquals(exp2, rr.next());
        assertFalse(rr.hasNext());
    }

    @Test
    public void testAppendingLabels() throws Exception {
        ClassPathResource cpr = new ClassPathResource("json/json_test_0.txt");
        String path = cpr.getFile().getAbsolutePath().replace("0", "%d");
        InputSplit is = new NumberedFileInputSplit(path, 0, 2);

        //Insert at the end:
        RecordReader rr = new JacksonRecordReader(getFieldSelection(), new ObjectMapper(new JsonFactory()), false, -1,
                        new LabelGen());
        rr.initialize(is);
        List<Writable> exp0 = Arrays.asList((Writable) new Text("aValue0"), new Text("bValue0"), new Text("cxValue0"),
                        new IntWritable(0));
        assertEquals(exp0, rr.next());
        List<Writable> exp1 = Arrays.asList((Writable) new Text("aValue1"), new Text("MISSING_B"), new Text("cxValue1"),
                        new IntWritable(1));
        assertEquals(exp1, rr.next());
        List<Writable> exp2 = Arrays.asList((Writable) new Text("aValue2"), new Text("bValue2"), new Text("MISSING_CX"),
                        new IntWritable(2));
        assertEquals(exp2, rr.next());

        //Insert at position 0:
        rr = new JacksonRecordReader(getFieldSelection(), new ObjectMapper(new JsonFactory()), false, -1,
                        new LabelGen(), 0);
        rr.initialize(is);
        exp0 = Arrays.asList((Writable) new IntWritable(0), new Text("aValue0"), new Text("bValue0"),
                        new Text("cxValue0"));
        assertEquals(exp0, rr.next());
        exp1 = Arrays.asList((Writable) new IntWritable(1), new Text("aValue1"), new Text("MISSING_B"),
                        new Text("cxValue1"));
        assertEquals(exp1, rr.next());
        exp2 = Arrays.asList((Writable) new IntWritable(2), new Text("aValue2"), new Text("bValue2"),
                        new Text("MISSING_CX"));
        assertEquals(exp2, rr.next());
    }

    @Test
    public void testAppendingLabelsMetaData() throws Exception {
        ClassPathResource cpr = new ClassPathResource("json/json_test_0.txt");
        String path = cpr.getFile().getAbsolutePath().replace("0", "%d");
        InputSplit is = new NumberedFileInputSplit(path, 0, 2);

        //Insert at the end:
        RecordReader rr = new JacksonRecordReader(getFieldSelection(), new ObjectMapper(new JsonFactory()), false, -1,
                        new LabelGen());
        rr.initialize(is);
        // First pass: collect plain record values.
        List<List<Writable>> out = new ArrayList<>();
        while (rr.hasNext()) {
            out.add(rr.next());
        }
        assertEquals(3, out.size());

        // Second pass: collect full Records plus metadata, and verify the values match.
        rr.reset();
        List<List<Writable>> out2 = new ArrayList<>();
        List<Record> outRecord = new ArrayList<>();
        List<RecordMetaData> meta = new ArrayList<>();
        while (rr.hasNext()) {
            Record r = rr.nextRecord();
            out2.add(r.getRecord());
            outRecord.add(r);
            meta.add(r.getMetaData());
        }
        assertEquals(out, out2);

        // Records reloaded from metadata must equal the originals.
        List<Record> fromMeta = rr.loadFromMetaData(meta);
        assertEquals(outRecord, fromMeta);
    }

    /** Maps a file path to an integer label based on the numeric suffix of the file name. */
    private static class LabelGen implements PathLabelGenerator {
        @Override
        public Writable getLabelForPath(String path) {
            if (path.endsWith("0.txt"))
                return new IntWritable(0);
            else if (path.endsWith("1.txt"))
                return new IntWritable(1);
            else
                return new IntWritable(2);
        }

        @Override
        public Writable getLabelForPath(URI uri) {
            return getLabelForPath(uri.getPath());
        }

        @Override
        public boolean inferLabelClasses() {
            return true;
        }
    }
}
| |
// Copyright (C) 2017 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.server.permissions;
import static com.google.gerrit.server.permissions.LabelPermission.ForUser.ON_BEHALF_OF;
import static com.google.gerrit.server.permissions.LabelPermission.ForUser.SELF;
import static java.util.Objects.requireNonNull;
import com.google.gerrit.entities.LabelType;
import com.google.gerrit.entities.LabelValue;
import com.google.gerrit.server.util.LabelVote;
/** Permission representing a label. */
public class LabelPermission implements ChangePermissionOrLabel {
  /** Whether the permission applies to the caller's own vote or to voting on behalf of another. */
  public enum ForUser {
    SELF,
    ON_BEHALF_OF;
  }

  private final ForUser forUser;
  private final String name;

  /**
   * Construct a reference to a label permission.
   *
   * @param type type description of the label.
   */
  public LabelPermission(LabelType type) {
    this(SELF, type);
  }

  /**
   * Construct a reference to a label permission.
   *
   * @param forUser {@code SELF} (default) or {@code ON_BEHALF_OF} for labelAs behavior.
   * @param type type description of the label.
   */
  public LabelPermission(ForUser forUser, LabelType type) {
    this(forUser, type.getName());
  }

  /**
   * Construct a reference to a label permission.
   *
   * @param name name of the label, e.g. {@code "Code-Review"} or {@code "Verified"}.
   */
  public LabelPermission(String name) {
    this(SELF, name);
  }

  /**
   * Construct a reference to a label permission.
   *
   * @param forUser {@code SELF} (default) or {@code ON_BEHALF_OF} for labelAs behavior.
   * @param name name of the label, e.g. {@code "Code-Review"} or {@code "Verified"}.
   */
  public LabelPermission(ForUser forUser, String name) {
    this.forUser = requireNonNull(forUser, "ForUser");
    this.name = LabelType.checkName(name);
  }

  /** Returns {@code SELF} or {@code ON_BEHALF_OF} (or labelAs). */
  public ForUser forUser() {
    return forUser;
  }

  /** Returns name of the label, e.g. {@code "Code-Review"}. */
  public String label() {
    return name;
  }

  @Override
  public String describeForException() {
    if (forUser == ON_BEHALF_OF) {
      return "label on behalf of " + name;
    }
    return "label " + name;
  }

  @Override
  public int hashCode() {
    // Hash the same fields equals() compares. Previously only the name was hashed,
    // so SELF and ON_BEHALF_OF permissions for the same label always collided in
    // hash-based collections.
    return 31 * forUser.hashCode() + name.hashCode();
  }

  @Override
  public boolean equals(Object other) {
    if (other instanceof LabelPermission) {
      LabelPermission b = (LabelPermission) other;
      return forUser == b.forUser && name.equals(b.name);
    }
    return false;
  }

  @Override
  public String toString() {
    if (forUser == ON_BEHALF_OF) {
      return "LabelAs[" + name + ']';
    }
    return "Label[" + name + ']';
  }

  /** A {@link LabelPermission} at a specific value. */
  public static class WithValue implements ChangePermissionOrLabel {
    private final ForUser forUser;
    private final LabelVote label;

    /**
     * Construct a reference to a label at a specific value.
     *
     * @param type description of the label.
     * @param value numeric score assigned to the label.
     */
    public WithValue(LabelType type, LabelValue value) {
      this(SELF, type, value);
    }

    /**
     * Construct a reference to a label at a specific value.
     *
     * @param type description of the label.
     * @param value numeric score assigned to the label.
     */
    public WithValue(LabelType type, short value) {
      this(SELF, type.getName(), value);
    }

    /**
     * Construct a reference to a label at a specific value.
     *
     * @param forUser {@code SELF} (default) or {@code ON_BEHALF_OF} for labelAs behavior.
     * @param type description of the label.
     * @param value numeric score assigned to the label.
     */
    public WithValue(ForUser forUser, LabelType type, LabelValue value) {
      this(forUser, type.getName(), value.getValue());
    }

    /**
     * Construct a reference to a label at a specific value.
     *
     * @param forUser {@code SELF} (default) or {@code ON_BEHALF_OF} for labelAs behavior.
     * @param type description of the label.
     * @param value numeric score assigned to the label.
     */
    public WithValue(ForUser forUser, LabelType type, short value) {
      this(forUser, type.getName(), value);
    }

    /**
     * Construct a reference to a label at a specific value.
     *
     * @param name name of the label, e.g. {@code "Code-Review"} or {@code "Verified"}.
     * @param value numeric score assigned to the label.
     */
    public WithValue(String name, short value) {
      this(SELF, name, value);
    }

    /**
     * Construct a reference to a label at a specific value.
     *
     * @param forUser {@code SELF} (default) or {@code ON_BEHALF_OF} for labelAs behavior.
     * @param name name of the label, e.g. {@code "Code-Review"} or {@code "Verified"}.
     * @param value numeric score assigned to the label.
     */
    public WithValue(ForUser forUser, String name, short value) {
      this(forUser, LabelVote.create(name, value));
    }

    /**
     * Construct a reference to a label at a specific value.
     *
     * @param label label name and vote.
     */
    public WithValue(LabelVote label) {
      this(SELF, label);
    }

    /**
     * Construct a reference to a label at a specific value.
     *
     * @param forUser {@code SELF} (default) or {@code ON_BEHALF_OF} for labelAs behavior.
     * @param label label name and vote.
     */
    public WithValue(ForUser forUser, LabelVote label) {
      this.forUser = requireNonNull(forUser, "ForUser");
      this.label = requireNonNull(label, "LabelVote");
    }

    /** Returns {@code SELF} or {@code ON_BEHALF_OF} (or labelAs). */
    public ForUser forUser() {
      return forUser;
    }

    /** Returns name of the label, e.g. {@code "Code-Review"}. */
    public String label() {
      return label.label();
    }

    /** Returns specific value of the label, e.g. 1 or 2. */
    public short value() {
      return label.value();
    }

    @Override
    public String describeForException() {
      if (forUser == ON_BEHALF_OF) {
        return "label on behalf of " + label.formatWithEquals();
      }
      return "label " + label.formatWithEquals();
    }

    @Override
    public int hashCode() {
      // Match equals(), which compares forUser in addition to the vote.
      return 31 * forUser.hashCode() + label.hashCode();
    }

    @Override
    public boolean equals(Object other) {
      if (other instanceof WithValue) {
        WithValue b = (WithValue) other;
        return forUser == b.forUser && label.equals(b.label);
      }
      return false;
    }

    @Override
    public String toString() {
      if (forUser == ON_BEHALF_OF) {
        return "LabelAs[" + label.format() + ']';
      }
      return "Label[" + label.format() + ']';
    }
  }
}
| |
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.infobar;
import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.animation.AnimatorSet;
import android.animation.ObjectAnimator;
import android.animation.PropertyValuesHolder;
import android.annotation.TargetApi;
import android.os.Build;
import android.view.View;
import android.view.ViewTreeObserver;
import android.view.animation.AccelerateDecelerateInterpolator;
import android.widget.LinearLayout;
import android.widget.TextView;
import org.chromium.chrome.R;
import java.util.ArrayList;
/**
* Sets up animations to move InfoBars around inside of the InfoBarContainer.
*
* Animations proceed in several phases:
* 1) Prep work is done for the InfoBar so that the View being animated in (if it exists) is
* properly sized. This involves adding the View to a FrameLayout with a visibility of
* INVISIBLE and triggering a layout.
*
* 2) Once the View has an actual size, we compute all of the actions needed for the animation.
* We use translations primarily to slide things in and out of the screen as things are shown,
* hidden, or resized.
*
* 3) The animation is kicked off and the animations run. During this phase, the View being shown
* is added to ContentWrapperView.
*
* 4) At the end of the animation, we clean up everything and make sure all the children are in the
* right places.
*/
public class AnimationHelper implements ViewTreeObserver.OnGlobalLayoutListener {
    private static final long ANIMATION_DURATION_MS = 250;

    public static final int ANIMATION_TYPE_SHOW = 0;
    public static final int ANIMATION_TYPE_SWAP = 1;
    public static final int ANIMATION_TYPE_HIDE = 2;
    public static final int ANIMATION_TYPE_BOUNDARY = 3;

    private final InfoBarContainer mContainer;
    private final LinearLayout mLinearLayout;
    private final InfoBar mInfoBar;
    // The ContentWrapperView being resized/shown/hidden by this animation.
    private final ContentWrapperView mTargetWrapperView;
    private final AnimatorSet mAnimatorSet;
    private final int mAnimationType;
    // View to swap in, or null when only resizing/hiding (see start()).
    private final View mToShow;
    // Guards continueAnimation() against running twice (it can be reached both
    // directly from start() and via onGlobalLayout()).
    private boolean mAnimationStarted;

    /**
     * Creates and starts an animation.
     * @param container InfoBarContainer that is having its InfoBars animated.
     * @param target ContentWrapperView that is the focus of the animation and is being resized,
     *               shown, or hidden.
     * @param infoBar InfoBar that goes with the specified ContentWrapperView.
     * @param toShow If non-null, this View will replace whatever child View the ContentWrapperView
     *               is currently displaying.
     * @param animationType Type of animation being performed.
     */
    public AnimationHelper(InfoBarContainer container, ContentWrapperView target, InfoBar infoBar,
            View toShow, int animationType) {
        mContainer = container;
        mLinearLayout = container.getLinearLayout();
        mInfoBar = infoBar;
        mTargetWrapperView = target;
        mAnimatorSet = new AnimatorSet();
        mAnimationType = animationType;
        mToShow = toShow;
        assert mLinearLayout.indexOfChild(mTargetWrapperView) != -1;
    }

    /**
     * Start the animation.
     */
    public void start() {
        mTargetWrapperView.prepareTransition(mToShow);
        mContainer.prepareTransition(mToShow);

        if (mToShow == null) {
            // We've got a size already; start the animation immediately.
            continueAnimation();
        } else {
            // Wait for the object to be sized; onGlobalLayout() fires after the
            // incoming View has been measured and laid out.
            mTargetWrapperView.getViewTreeObserver().addOnGlobalLayoutListener(this);
        }
    }

    /**
     * @return the InfoBar being animated.
     */
    public InfoBar getInfoBar() {
        return mInfoBar;
    }

    /**
     * @return the ContentWrapperView being animated.
     */
    public ContentWrapperView getTarget() {
        return mTargetWrapperView;
    }

    /**
     * @return the type of animation being performed.
     */
    public int getAnimationType() {
        return mAnimationType;
    }

    /**
     * Catch when the layout occurs, which lets us know when the View has been sized properly.
     */
    @Override
    public void onGlobalLayout() {
        mTargetWrapperView.getViewTreeObserver().removeOnGlobalLayoutListener(this);
        continueAnimation();
    }

    // Computes per-child translations so every InfoBar slides from its current position
    // to where it will sit once the target wrapper has changed height, then runs them
    // all together in one AnimatorSet.
    private void continueAnimation() {
        if (mAnimationStarted) return;
        mAnimationStarted = true;

        int indexOfWrapperView = mLinearLayout.indexOfChild(mTargetWrapperView);
        assert indexOfWrapperView != -1;

        ArrayList<Animator> animators = new ArrayList<Animator>();
        mTargetWrapperView.getAnimationsForTransition(animators);

        // Determine where the tops of each InfoBar will need to be.
        int heightDifference = mTargetWrapperView.getTransitionHeightDifference();
        int cumulativeTopStart = 0;
        int cumulativeTopEnd = 0;
        int cumulativeEndHeight = 0;
        if (heightDifference >= 0) {
            // The current container is smaller than the final container, so the current 0
            // coordinate will be >= 0 in the final container.
            cumulativeTopStart = heightDifference;
        } else {
            // The current container is bigger than the final container, so the current 0
            // coordinate will be < 0 in the final container.
            cumulativeTopEnd = -heightDifference;
        }

        for (int i = 0; i < mLinearLayout.getChildCount(); ++i) {
            View view = mLinearLayout.getChildAt(i);

            // At this point, the View being transitioned in shouldn't have been added to the
            // visible container, yet, and shouldn't affect calculations.
            int startHeight = view.getHeight();
            int endHeight = startHeight + (i == indexOfWrapperView ? heightDifference : 0);
            int topStart = cumulativeTopStart;
            int topEnd = cumulativeTopEnd;
            int bottomStart = topStart + startHeight;
            int bottomEnd = topEnd + endHeight;

            if (topStart == topEnd && bottomStart == bottomEnd) {
                // The View needs to stay put.
                view.setTop(topEnd);
                view.setBottom(bottomEnd);
                view.setY(topEnd);
                view.setTranslationY(0);
            } else {
                // A translation is required to move the View into place.
                // NOTE(review): the magnitude used is always heightDifference; only the
                // direction varies. Presumably all displaced children shift by exactly the
                // target's height delta — confirm against ContentWrapperView's contract.
                int translation = heightDifference;

                boolean translateDownward;
                if (topStart < topEnd) {
                    translateDownward = false;
                } else if (topStart > topEnd) {
                    translateDownward = true;
                } else {
                    translateDownward = bottomEnd > bottomStart;
                }

                PropertyValuesHolder viewTranslation;
                if (translateDownward) {
                    // Place the View at its final bounds and animate the offset back to 0.
                    view.setTop(topEnd);
                    view.setBottom(bottomEnd);
                    view.setTranslationY(translation);
                    view.setY(topEnd + translation);
                    viewTranslation =
                            PropertyValuesHolder.ofFloat("translationY", translation, 0.0f);
                } else {
                    // Animate away from the current position; bounds are left unchanged here.
                    viewTranslation =
                            PropertyValuesHolder.ofFloat("translationY", 0.0f, -translation);
                }

                animators.add(ObjectAnimator.ofPropertyValuesHolder(view, viewTranslation));
            }

            // Add heights to the cumulative totals.
            cumulativeTopStart += startHeight;
            cumulativeTopEnd += endHeight;
            cumulativeEndHeight += endHeight;
        }

        // Lock the InfoBarContainer's size at its largest during the animation to avoid
        // clipping issues.
        int oldContainerTop = mLinearLayout.getTop();
        int newContainerTop = mLinearLayout.getBottom() - cumulativeEndHeight;
        int biggestContainerTop = Math.min(oldContainerTop, newContainerTop);
        mLinearLayout.setTop(biggestContainerTop);

        // Set up and run all of the animations.
        mAnimatorSet.addListener(new AnimatorListenerAdapter() {
            @Override
            public void onAnimationStart(Animator animation) {
                mTargetWrapperView.startTransition();
            }

            @TargetApi(Build.VERSION_CODES.JELLY_BEAN)
            @Override
            public void onAnimationEnd(Animator animation) {
                mTargetWrapperView.finishTransition();
                mContainer.finishTransition();

                // Announce newly shown/swapped infobar text for accessibility (API 16+).
                if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN && mToShow != null
                        && (mAnimationType == ANIMATION_TYPE_SHOW
                                || mAnimationType == ANIMATION_TYPE_SWAP)) {
                    TextView messageView = (TextView) mToShow.findViewById(R.id.infobar_message);
                    if (messageView != null) {
                        mToShow.announceForAccessibility(messageView.getText());
                    }
                }
            }
        });
        mAnimatorSet.playTogether(animators);
        mAnimatorSet.setDuration(ANIMATION_DURATION_MS);
        mAnimatorSet.setInterpolator(new AccelerateDecelerateInterpolator());
        mAnimatorSet.start();
    }
}
| |
/*
* Copyright (c) 2003, 2005, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package sun.nio.cs;
import java.nio.charset.Charset;
import java.nio.charset.CharsetDecoder;
import java.nio.charset.CharsetEncoder;
import sun.nio.cs.StandardCharsets;
import sun.nio.cs.SingleByteDecoder;
import sun.nio.cs.SingleByteEncoder;
import sun.nio.cs.HistoricallyNamedCharset;
public class IBM855
extends Charset
implements HistoricallyNamedCharset
{
public IBM855() {
super("IBM855", StandardCharsets.aliases_IBM855);
}
public String historicalName() {
return "Cp855";
}
public boolean contains(Charset cs) {
return (cs instanceof IBM855);
}
public CharsetDecoder newDecoder() {
return new Decoder(this);
}
public CharsetEncoder newEncoder() {
return new Encoder(this);
}
/**
* These accessors are temporarily supplied while sun.io
* converters co-exist with the sun.nio.cs.{ext} charset coders
* These facilitate sharing of conversion tables between the
* two co-existing implementations. When sun.io converters
* are made extinct these will be unncessary and should be removed
*/
public String getDecoderSingleByteMappings() {
return Decoder.byteToCharTable;
}
public short[] getEncoderIndex1() {
return Encoder.index1;
}
public String getEncoderIndex2() {
return Encoder.index2;
}
private static class Decoder extends SingleByteDecoder {
public Decoder(Charset cs) {
super(cs, byteToCharTable);
}
private final static String byteToCharTable =
"\u0452\u0402\u0453\u0403\u0451\u0401\u0454\u0404" + // 0x80 - 0x87
"\u0455\u0405\u0456\u0406\u0457\u0407\u0458\u0408" + // 0x88 - 0x8F
"\u0459\u0409\u045A\u040A\u045B\u040B\u045C\u040C" + // 0x90 - 0x97
"\u045E\u040E\u045F\u040F\u044E\u042E\u044A\u042A" + // 0x98 - 0x9F
"\u0430\u0410\u0431\u0411\u0446\u0426\u0434\u0414" + // 0xA0 - 0xA7
"\u0435\u0415\u0444\u0424\u0433\u0413\u00AB\u00BB" + // 0xA8 - 0xAF
"\u2591\u2592\u2593\u2502\u2524\u0445\u0425\u0438" + // 0xB0 - 0xB7
"\u0418\u2563\u2551\u2557\u255D\u0439\u0419\u2510" + // 0xB8 - 0xBF
"\u2514\u2534\u252C\u251C\u2500\u253C\u043A\u041A" + // 0xC0 - 0xC7
"\u255A\u2554\u2569\u2566\u2560\u2550\u256C\u00A4" + // 0xC8 - 0xCF
"\u043B\u041B\u043C\u041C\u043D\u041D\u043E\u041E" + // 0xD0 - 0xD7
"\u043F\u2518\u250C\u2588\u2584\u041F\u044F\u2580" + // 0xD8 - 0xDF
"\u042F\u0440\u0420\u0441\u0421\u0442\u0422\u0443" + // 0xE0 - 0xE7
"\u0423\u0436\u0416\u0432\u0412\u044C\u042C\u2116" + // 0xE8 - 0xEF
"\u00AD\u044B\u042B\u0437\u0417\u0448\u0428\u044D" + // 0xF0 - 0xF7
"\u042D\u0449\u0429\u0447\u0427\u00A7\u25A0\u00A0" + // 0xF8 - 0xFF
"\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007" + // 0x00 - 0x07
"\b\t\n\u000B\f\r\u000E\u000F" + // 0x08 - 0x0F
"\u0010\u0011\u0012\u0013\u0014\u0015\u0016\u0017" + // 0x10 - 0x17
"\u0018\u0019\u001A\u001B\u001C\u001D\u001E\u001F" + // 0x18 - 0x1F
"\u0020\u0021\"\u0023\u0024\u0025\u0026\'" + // 0x20 - 0x27
"\u0028\u0029\u002A\u002B\u002C\u002D\u002E\u002F" + // 0x28 - 0x2F
"\u0030\u0031\u0032\u0033\u0034\u0035\u0036\u0037" + // 0x30 - 0x37
"\u0038\u0039\u003A\u003B\u003C\u003D\u003E\u003F" + // 0x38 - 0x3F
"\u0040\u0041\u0042\u0043\u0044\u0045\u0046\u0047" + // 0x40 - 0x47
"\u0048\u0049\u004A\u004B\u004C\u004D\u004E\u004F" + // 0x48 - 0x4F
"\u0050\u0051\u0052\u0053\u0054\u0055\u0056\u0057" + // 0x50 - 0x57
"\u0058\u0059\u005A\u005B\\\u005D\u005E\u005F" + // 0x58 - 0x5F
"\u0060\u0061\u0062\u0063\u0064\u0065\u0066\u0067" + // 0x60 - 0x67
"\u0068\u0069\u006A\u006B\u006C\u006D\u006E\u006F" + // 0x68 - 0x6F
"\u0070\u0071\u0072\u0073\u0074\u0075\u0076\u0077" + // 0x70 - 0x77
"\u0078\u0079\u007A\u007B\u007C\u007D\u007E\u007F"; // 0x78 - 0x7F
}
private static class Encoder extends SingleByteEncoder {
public Encoder(Charset cs) {
super(cs, index1, index2, 0xFF00, 0x00FF, 8);
}
private final static String index2 =
"\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007" +
"\b\t\n\u000B\f\r\u000E\u000F" +
"\u0010\u0011\u0012\u0013\u0014\u0015\u0016\u0017" +
"\u0018\u0019\u001A\u001B\u001C\u001D\u001E\u001F" +
"\u0020\u0021\"\u0023\u0024\u0025\u0026\'" +
"\u0028\u0029\u002A\u002B\u002C\u002D\u002E\u002F" +
"\u0030\u0031\u0032\u0033\u0034\u0035\u0036\u0037" +
"\u0038\u0039\u003A\u003B\u003C\u003D\u003E\u003F" +
"\u0040\u0041\u0042\u0043\u0044\u0045\u0046\u0047" +
"\u0048\u0049\u004A\u004B\u004C\u004D\u004E\u004F" +
"\u0050\u0051\u0052\u0053\u0054\u0055\u0056\u0057" +
"\u0058\u0059\u005A\u005B\\\u005D\u005E\u005F" +
"\u0060\u0061\u0062\u0063\u0064\u0065\u0066\u0067" +
"\u0068\u0069\u006A\u006B\u006C\u006D\u006E\u006F" +
"\u0070\u0071\u0072\u0073\u0074\u0075\u0076\u0077" +
"\u0078\u0079\u007A\u007B\u007C\u007D\u007E\u007F" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u00FF\u0000\u0000\u0000\u00CF\u0000\u0000\u00FD" +
"\u0000\u0000\u0000\u00AE\u0000\u00F0\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u00AF\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0085\u0081\u0083\u0087" +
"\u0089\u008B\u008D\u008F\u0091\u0093\u0095\u0097" +
"\u0000\u0099\u009B\u00A1\u00A3\u00EC\u00AD\u00A7" +
"\u00A9\u00EA\u00F4\u00B8\u00BE\u00C7\u00D1\u00D3" +
"\u00D5\u00D7\u00DD\u00E2\u00E4\u00E6\u00E8\u00AB" +
"\u00B6\u00A5\u00FC\u00F6\u00FA\u009F\u00F2\u00EE" +
"\u00F8\u009D\u00E0\u00A0\u00A2\u00EB\u00AC\u00A6" +
"\u00A8\u00E9\u00F3\u00B7\u00BD\u00C6\u00D0\u00D2" +
"\u00D4\u00D6\u00D8\u00E1\u00E3\u00E5\u00E7\u00AA" +
"\u00B5\u00A4\u00FB\u00F5\u00F9\u009E\u00F1\u00ED" +
"\u00F7\u009C\u00DE\u0000\u0084\u0080\u0082\u0086" +
"\u0088\u008A\u008C\u008E\u0090\u0092\u0094\u0096" +
"\u0000\u0098\u009A\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u00EF\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u00C4\u0000\u00B3" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u00DA\u0000\u0000\u0000\u00BF\u0000\u0000" +
"\u0000\u00C0\u0000\u0000\u0000\u00D9\u0000\u0000" +
"\u0000\u00C3\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u00B4\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u00C2\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u00C1\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u00C5\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u00CD\u00BA\u0000" +
"\u0000\u00C9\u0000\u0000\u00BB\u0000\u0000\u00C8" +
"\u0000\u0000\u00BC\u0000\u0000\u00CC\u0000\u0000" +
"\u00B9\u0000\u0000\u00CB\u0000\u0000\u00CA\u0000" +
"\u0000\u00CE\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u00DF\u0000\u0000" +
"\u0000\u00DC\u0000\u0000\u0000\u00DB\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u00B0\u00B1" +
"\u00B2\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u00FE\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
"\u0000\u0000\u0000\u0000\u0000";
    // Auto-generated charset table data: second-level lookup indexed by the
    // high byte of a code point. NOTE(review): each entry appears to be an
    // offset into the composite mapping string declared above, with 188
    // presumably marking an empty row — verify against the table generator
    // before relying on this; do not hand-edit the values.
    private final static short index1[] = {
        0, 188, 188, 188, 443, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188,
        188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188,
        188, 677, 188, 188, 188, 933, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188,
        188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188,
        188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188,
        188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188,
        188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188,
        188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188,
        188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188,
        188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188,
        188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188,
        188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188,
        188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188,
        188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188,
        188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188,
        188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188,
    };
}
}
| |
package com.katsuraf.demoarchitecture.ui.widget;
import android.media.MediaPlayer;
import android.view.SurfaceView;
import android.view.View;
import android.view.ViewGroup;
import android.webkit.WebChromeClient;
import android.widget.FrameLayout;
/**
* This class serves as a WebChromeClient to be set to a WebView, allowing it to play video.
* Video will play differently depending on target API level (in-line, fullscreen, or both).
* <p/>
* It has been tested with the following video classes:
* - android.widget.VideoView (typically API level <11)
* - android.webkit.HTML5VideoFullScreen$VideoSurfaceView/VideoTextureView (typically API level 11-18)
* - com.android.org.chromium.content.browser.ContentVideoView$VideoSurfaceView (typically API level 19+)
* <p/>
* Important notes:
* - For API level 11+, android:hardwareAccelerated="true" must be set in the application manifest.
* - The invoking activity must call VideoEnabledWebChromeClient's onBackPressed() inside of its own onBackPressed().
* - Tested in Android API levels 8-19. Only tested on http://m.youtube.com.
*
* @author Cristian Perez (http://cpr.name)
*/
public class VideoEnabledWebChromeClient extends WebChromeClient implements MediaPlayer.OnPreparedListener, MediaPlayer.OnCompletionListener, MediaPlayer.OnErrorListener {

    /**
     * Callback fired whenever video playback enters (true) or leaves (false)
     * full-screen presentation.
     */
    public interface ToggledFullscreenCallback {
        void toggledFullscreen(boolean fullscreen);
    }

    private View activityNonVideoView;   // hidden while the video is full-screen
    private ViewGroup activityVideoView; // hosts the full-screen video view
    private View loadingView;            // optional loading indicator (typically API level <11); may be null
    private VideoEnabledWebView webView; // owner WebView; enables HTML5 "ended" detection when non-null
    private boolean isVideoFullscreen;   // Indicates if the video is being displayed using a custom view (typically full-screen)
    private FrameLayout videoViewContainer;       // custom view handed to onShowCustomView()
    private CustomViewCallback videoViewCallback; // callback used to tell the WebView the custom view was hidden
    private ToggledFullscreenCallback toggledFullscreenCallback;

    /**
     * Never use this constructor alone.
     * This constructor allows this class to be defined as an inline inner class in which the user can override methods
     */
    @SuppressWarnings("unused")
    public VideoEnabledWebChromeClient() {
    }

    /**
     * Builds a video enabled WebChromeClient.
     *
     * @param activityNonVideoView A View in the activity's layout that contains every other view that should be hidden when the video goes full-screen.
     * @param activityVideoView    A ViewGroup in the activity's layout that will display the video. Typically you would like this to fill the whole layout.
     */
    @SuppressWarnings("unused")
    public VideoEnabledWebChromeClient(View activityNonVideoView, ViewGroup activityVideoView) {
        // Delegate to the most specific constructor to avoid duplicated assignments.
        this(activityNonVideoView, activityVideoView, null, null);
    }

    /**
     * Builds a video enabled WebChromeClient.
     *
     * @param activityNonVideoView A View in the activity's layout that contains every other view that should be hidden when the video goes full-screen.
     * @param activityVideoView    A ViewGroup in the activity's layout that will display the video. Typically you would like this to fill the whole layout.
     * @param loadingView          A View to be shown while the video is loading (typically only used in API level <11). Must be already inflated and not attached to a parent view.
     */
    @SuppressWarnings("unused")
    public VideoEnabledWebChromeClient(View activityNonVideoView, ViewGroup activityVideoView, View loadingView) {
        this(activityNonVideoView, activityVideoView, loadingView, null);
    }

    /**
     * Builds a video enabled WebChromeClient.
     *
     * @param activityNonVideoView A View in the activity's layout that contains every other view that should be hidden when the video goes full-screen.
     * @param activityVideoView    A ViewGroup in the activity's layout that will display the video. Typically you would like this to fill the whole layout.
     * @param loadingView          A View to be shown while the video is loading (typically only used in API level <11). Must be already inflated and not attached to a parent view.
     * @param webView              The owner VideoEnabledWebView. Passing it will enable the VideoEnabledWebChromeClient to detect the HTML5 video ended event and exit full-screen.
     *                             Note: The web page must only contain one video tag in order for the HTML5 video ended event to work. This could be improved if needed (see Javascript code).
     */
    @SuppressWarnings("unused")
    public VideoEnabledWebChromeClient(View activityNonVideoView, ViewGroup activityVideoView, View loadingView, VideoEnabledWebView webView) {
        this.activityNonVideoView = activityNonVideoView;
        this.activityVideoView = activityVideoView;
        this.loadingView = loadingView;
        this.webView = webView;
        this.isVideoFullscreen = false;
    }

    /**
     * Indicates if the video is being displayed using a custom view (typically full-screen)
     *
     * @return true if the video is being displayed using a custom view (typically full-screen)
     */
    public boolean isVideoFullscreen() {
        return isVideoFullscreen;
    }

    /**
     * Set a callback that will be fired when the video starts or finishes displaying using a custom view (typically full-screen)
     *
     * @param callback A VideoEnabledWebChromeClient.ToggledFullscreenCallback callback
     */
    @SuppressWarnings("unused")
    public void setOnToggledFullscreen(ToggledFullscreenCallback callback) {
        this.toggledFullscreenCallback = callback;
    }

    @Override
    public void onShowCustomView(View view, CustomViewCallback callback) {
        if (view instanceof FrameLayout) {
            // A video wants to be shown
            FrameLayout frameLayout = (FrameLayout) view;
            View focusedChild = frameLayout.getFocusedChild();

            // Save video related variables
            this.isVideoFullscreen = true;
            this.videoViewContainer = frameLayout;
            this.videoViewCallback = callback;

            // Hide the non-video view, add the video view, and show it
            activityNonVideoView.setVisibility(View.INVISIBLE);
            activityVideoView.addView(videoViewContainer, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT));
            activityVideoView.setVisibility(View.VISIBLE);

            if (focusedChild instanceof android.widget.VideoView) {
                // android.widget.VideoView (typically API level <11)
                android.widget.VideoView videoView = (android.widget.VideoView) focusedChild;

                // Handle all the required events
                videoView.setOnPreparedListener(this);
                videoView.setOnCompletionListener(this);
                videoView.setOnErrorListener(this);
            } else {
                // Other classes, including:
                // - android.webkit.HTML5VideoFullScreen$VideoSurfaceView, which inherits from android.view.SurfaceView (typically API level 11-18)
                // - android.webkit.HTML5VideoFullScreen$VideoTextureView, which inherits from android.view.TextureView (typically API level 11-18)
                // - com.android.org.chromium.content.browser.ContentVideoView$VideoSurfaceView, which inherits from android.view.SurfaceView (typically API level 19+)

                // Handle HTML5 video ended event only if the class is a SurfaceView
                // Test case: TextureView of Sony Xperia T API level 16 doesn't work fullscreen when loading the javascript below
                if (webView != null && webView.getSettings().getJavaScriptEnabled() && focusedChild instanceof SurfaceView) {
                    // Run javascript code that detects the video end and notifies the Javascript interface.
                    // Note: the page must contain only one <video> tag for this detection to work.
                    StringBuilder js = new StringBuilder("javascript:");
                    js.append("var _ytrp_html5_video_last;");
                    js.append("var _ytrp_html5_video = document.getElementsByTagName('video')[0];");
                    js.append("if (_ytrp_html5_video != undefined && _ytrp_html5_video != _ytrp_html5_video_last) {");
                    js.append("_ytrp_html5_video_last = _ytrp_html5_video;");
                    js.append("function _ytrp_html5_video_ended() {");
                    js.append("_VideoEnabledWebView.notifyVideoEnd();"); // Must match Javascript interface name and method of VideoEnabledWebView
                    js.append("}");
                    js.append("_ytrp_html5_video.addEventListener('ended', _ytrp_html5_video_ended);");
                    js.append("}");
                    webView.loadUrl(js.toString());
                }
            }

            // Notify full-screen change
            if (toggledFullscreenCallback != null) {
                toggledFullscreenCallback.toggledFullscreen(true);
            }
        }
    }

    @Override
    @SuppressWarnings("deprecation")
    public void onShowCustomView(View view, int requestedOrientation, CustomViewCallback callback) // Available in API level 14+, deprecated in API level 18+
    {
        onShowCustomView(view, callback);
    }

    @Override
    public void onHideCustomView() {
        // This method should be manually called on video end in all cases because it's not always called automatically.
        // This method must be manually called on back key press (from this class' onBackPressed() method).
        if (isVideoFullscreen) {
            // Hide the video view, remove it, and show the non-video view
            activityVideoView.setVisibility(View.INVISIBLE);
            activityVideoView.removeView(videoViewContainer);
            activityNonVideoView.setVisibility(View.VISIBLE);

            // Call back (only in API level <19, because in API level 19+ with chromium webview it crashes)
            if (videoViewCallback != null && !videoViewCallback.getClass().getName().contains(".chromium.")) {
                videoViewCallback.onCustomViewHidden();
            }

            // Reset video related variables
            isVideoFullscreen = false;
            videoViewContainer = null;
            videoViewCallback = null;

            // Notify full-screen change
            if (toggledFullscreenCallback != null) {
                toggledFullscreenCallback.toggledFullscreen(false);
            }
        }
    }

    @Override
    public View getVideoLoadingProgressView() // Video will start loading
    {
        if (loadingView != null) {
            loadingView.setVisibility(View.VISIBLE);
            return loadingView;
        } else {
            return super.getVideoLoadingProgressView();
        }
    }

    @Override
    public void onPrepared(MediaPlayer mp) // Video will start playing, only called in the case of android.widget.VideoView (typically API level <11)
    {
        if (loadingView != null) {
            loadingView.setVisibility(View.GONE);
        }
    }

    @Override
    public void onCompletion(MediaPlayer mp) // Video finished playing, only called in the case of android.widget.VideoView (typically API level <11)
    {
        onHideCustomView();
    }

    @Override
    public boolean onError(MediaPlayer mp, int what, int extra) // Error while playing video, only called in the case of android.widget.VideoView (typically API level <11)
    {
        return false; // By returning false, onCompletion() will be called
    }

    /**
     * Notifies the class that the back key has been pressed by the user.
     * This must be called from the Activity's onBackPressed(), and if it returns false, the activity itself should handle it. Otherwise don't do anything.
     *
     * @return Returns true if the event was handled, and false if was not (video view is not visible)
     */
    @SuppressWarnings("unused")
    public boolean onBackPressed() {
        if (isVideoFullscreen) {
            onHideCustomView();
            return true;
        } else {
            return false;
        }
    }
}
| |
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution
// Automatically generated by LOXI from template of_class.java
// Do not modify
package org.projectfloodlight.openflow.protocol.ver14;
import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.stat.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.oxs.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import io.netty.buffer.ByteBuf;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;
// LoxiGen-generated wire codec for the bsn_tlv_partner_key TLV, OpenFlow 1.4.
// Wire layout: type (u16 = 0x33), length (u16 = 6), value (u16).
// NOTE(review): this file is machine-generated ("Do not modify" header above);
// comments only here — behavioral changes belong in the LoxiGen templates.
class OFBsnTlvPartnerKeyVer14 implements OFBsnTlvPartnerKey {
    private static final Logger logger = LoggerFactory.getLogger(OFBsnTlvPartnerKeyVer14.class);
    // version: 1.4
    final static byte WIRE_VERSION = 5;
    // total serialized size in bytes: 2 (type) + 2 (length) + 2 (value)
    final static int LENGTH = 6;

        private final static int DEFAULT_VALUE = 0x0;

    // OF message fields
    private final int value;
//
    // Immutable default instance
    final static OFBsnTlvPartnerKeyVer14 DEFAULT = new OFBsnTlvPartnerKeyVer14(
        DEFAULT_VALUE
    );

    // package private constructor - used by readers, builders, and factory
    OFBsnTlvPartnerKeyVer14(int value) {
        this.value = value;
    }

    // Accessors for OF message fields
    @Override
    public int getType() {
        return 0x33;
    }

    @Override
    public int getValue() {
        return value;
    }

    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_14;
    }

    // Builder seeded with this instance's field values.
    public OFBsnTlvPartnerKey.Builder createBuilder() {
        return new BuilderWithParent(this);
    }

    // Builder that falls back to a parent message's values for unset fields.
    static class BuilderWithParent implements OFBsnTlvPartnerKey.Builder {
        final OFBsnTlvPartnerKeyVer14 parentMessage;

        // OF message fields
        private boolean valueSet;
        private int value;

        BuilderWithParent(OFBsnTlvPartnerKeyVer14 parentMessage) {
            this.parentMessage = parentMessage;
        }

        @Override
        public int getType() {
            return 0x33;
        }

        @Override
        public int getValue() {
            return value;
        }

        @Override
        public OFBsnTlvPartnerKey.Builder setValue(int value) {
            this.value = value;
            this.valueSet = true;
            return this;
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_14;
        }

        @Override
        public OFBsnTlvPartnerKey build() {
                // unset fields inherit the parent message's value
                int value = this.valueSet ? this.value : parentMessage.value;
//
                return new OFBsnTlvPartnerKeyVer14(
                    value
                );
        }
    }

    // Standalone builder; unset fields fall back to the generated defaults.
    static class Builder implements OFBsnTlvPartnerKey.Builder {
        // OF message fields
        private boolean valueSet;
        private int value;

        @Override
        public int getType() {
            return 0x33;
        }

        @Override
        public int getValue() {
            return value;
        }

        @Override
        public OFBsnTlvPartnerKey.Builder setValue(int value) {
            this.value = value;
            this.valueSet = true;
            return this;
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_14;
        }

//
        @Override
        public OFBsnTlvPartnerKey build() {
            int value = this.valueSet ? this.value : DEFAULT_VALUE;
            return new OFBsnTlvPartnerKeyVer14(
                    value
                );
        }
    }

    final static Reader READER = new Reader();
    static class Reader implements OFMessageReader<OFBsnTlvPartnerKey> {
        @Override
        public OFBsnTlvPartnerKey readFrom(ByteBuf bb) throws OFParseError {
            int start = bb.readerIndex();
            // fixed value property type == 0x33
            short type = bb.readShort();
            if(type != (short) 0x33)
                throw new OFParseError("Wrong type: Expected=0x33(0x33), got="+type);
            int length = U16.f(bb.readShort());
            if(length != 6)
                throw new OFParseError("Wrong length: Expected=6(6), got="+length);
            if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
                // Buffer does not have all data yet
                bb.readerIndex(start);
                return null;
            }
            if(logger.isTraceEnabled())
                logger.trace("readFrom - length={}", length);
            // value is a u16 on the wire, widened to int in memory
            int value = U16.f(bb.readShort());

            OFBsnTlvPartnerKeyVer14 bsnTlvPartnerKeyVer14 = new OFBsnTlvPartnerKeyVer14(
                    value
                    );
            if(logger.isTraceEnabled())
                logger.trace("readFrom - read={}", bsnTlvPartnerKeyVer14);
            return bsnTlvPartnerKeyVer14;
        }
    }

    public void putTo(PrimitiveSink sink) {
        FUNNEL.funnel(this, sink);
    }

    final static OFBsnTlvPartnerKeyVer14Funnel FUNNEL = new OFBsnTlvPartnerKeyVer14Funnel();
    static class OFBsnTlvPartnerKeyVer14Funnel implements Funnel<OFBsnTlvPartnerKeyVer14> {
        private static final long serialVersionUID = 1L;
        @Override
        public void funnel(OFBsnTlvPartnerKeyVer14 message, PrimitiveSink sink) {
            // fixed value property type = 0x33
            sink.putShort((short) 0x33);
            // fixed value property length = 6
            sink.putShort((short) 0x6);
            // NOTE(review): the funnel hashes the full int while the wire
            // writer below truncates to u16; presumably intentional in the
            // generator — verify before assuming funnel/wire equivalence.
            sink.putInt(message.value);
        }
    }

    public void writeTo(ByteBuf bb) {
        WRITER.write(bb, this);
    }

    final static Writer WRITER = new Writer();
    static class Writer implements OFMessageWriter<OFBsnTlvPartnerKeyVer14> {
        @Override
        public void write(ByteBuf bb, OFBsnTlvPartnerKeyVer14 message) {
            // fixed value property type = 0x33
            bb.writeShort((short) 0x33);
            // fixed value property length = 6
            bb.writeShort((short) 0x6);
            bb.writeShort(U16.t(message.value));
        }
    }

    @Override
    public String toString() {
        StringBuilder b = new StringBuilder("OFBsnTlvPartnerKeyVer14(");
        b.append("value=").append(value);
        b.append(")");
        return b.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        OFBsnTlvPartnerKeyVer14 other = (OFBsnTlvPartnerKeyVer14) obj;

        if( value != other.value)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;

        result = prime * result + value;
        return result;
    }
}
| |
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.query.select;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonTypeName;
import com.google.common.base.Preconditions;
import io.druid.java.util.common.granularity.Granularities;
import io.druid.java.util.common.granularity.Granularity;
import io.druid.query.BaseQuery;
import io.druid.query.DataSource;
import io.druid.query.Druids;
import io.druid.query.Query;
import io.druid.query.Result;
import io.druid.query.dimension.DimensionSpec;
import io.druid.query.filter.DimFilter;
import io.druid.query.spec.QuerySegmentSpec;
import io.druid.segment.VirtualColumns;
import java.util.List;
import java.util.Map;
import java.util.Objects;
/**
 * Druid "select" query: returns raw event rows (dimensions and metrics) from a
 * datasource, paged through {@link PagingSpec}. Instances are immutable; the
 * {@code with*} methods return modified copies built via {@link Druids.SelectQueryBuilder}.
 */
@JsonTypeName("select")
public class SelectQuery extends BaseQuery<Result<SelectResultValue>>
{
  private final DimFilter dimFilter;
  private final Granularity granularity;
  private final List<DimensionSpec> dimensions;
  private final List<String> metrics;
  private final VirtualColumns virtualColumns;
  private final PagingSpec pagingSpec;

  @JsonCreator
  public SelectQuery(
      @JsonProperty("dataSource") DataSource dataSource,
      @JsonProperty("intervals") QuerySegmentSpec querySegmentSpec,
      @JsonProperty("descending") boolean descending,
      @JsonProperty("filter") DimFilter dimFilter,
      @JsonProperty("granularity") Granularity granularity,
      @JsonProperty("dimensions") List<DimensionSpec> dimensions,
      @JsonProperty("metrics") List<String> metrics,
      @JsonProperty("virtualColumns") VirtualColumns virtualColumns,
      @JsonProperty("pagingSpec") PagingSpec pagingSpec,
      @JsonProperty("context") Map<String, Object> context
  )
  {
    super(dataSource, querySegmentSpec, descending, context);
    this.dimFilter = dimFilter;
    // granularity defaults to ALL when omitted from the JSON spec
    this.granularity = granularity == null ? Granularities.ALL : granularity;
    this.dimensions = dimensions;
    this.virtualColumns = VirtualColumns.nullToEmpty(virtualColumns);
    this.metrics = metrics;
    // validate eagerly so a malformed spec fails at construction time
    this.pagingSpec = Preconditions.checkNotNull(pagingSpec, "must specify a pagingSpec");
    Preconditions.checkArgument(checkPagingSpec(pagingSpec, descending), "invalid pagingSpec");
  }

  /**
   * A paging spec is valid when each paging identifier's sign matches the scan
   * direction (negative offsets for descending, non-negative for ascending)
   * and the threshold is non-negative.
   */
  private static boolean checkPagingSpec(PagingSpec pagingSpec, boolean descending)
  {
    for (Integer value : pagingSpec.getPagingIdentifiers().values()) {
      // XOR: descending requires value < 0, ascending requires value >= 0
      if (descending ^ (value < 0)) {
        return false;
      }
    }
    return pagingSpec.getThreshold() >= 0;
  }

  @Override
  public boolean hasFilters()
  {
    return dimFilter != null;
  }

  @Override
  public DimFilter getFilter()
  {
    return dimFilter;
  }

  @Override
  public String getType()
  {
    return Query.SELECT;
  }

  // Serialized as "filter" to match the JSON query spec.
  @JsonProperty("filter")
  public DimFilter getDimensionsFilter()
  {
    return dimFilter;
  }

  @JsonProperty
  public Granularity getGranularity()
  {
    return granularity;
  }

  @JsonProperty
  public List<DimensionSpec> getDimensions()
  {
    return dimensions;
  }

  @JsonProperty
  public PagingSpec getPagingSpec()
  {
    return pagingSpec;
  }

  @JsonProperty
  public List<String> getMetrics()
  {
    return metrics;
  }

  @JsonProperty
  public VirtualColumns getVirtualColumns()
  {
    return virtualColumns;
  }

  /** Returns the paging offset for the given segment identifier, honoring scan direction. */
  public PagingOffset getPagingOffset(String identifier)
  {
    return pagingSpec.getOffset(identifier, isDescending());
  }

  @Override
  public SelectQuery withQuerySegmentSpec(QuerySegmentSpec querySegmentSpec)
  {
    return Druids.SelectQueryBuilder.copy(this).intervals(querySegmentSpec).build();
  }

  @Override
  public Query<Result<SelectResultValue>> withDataSource(DataSource dataSource)
  {
    return Druids.SelectQueryBuilder.copy(this).dataSource(dataSource).build();
  }

  @Override
  public SelectQuery withOverriddenContext(Map<String, Object> contextOverrides)
  {
    Map<String, Object> newContext = computeOverriddenContext(getContext(), contextOverrides);
    return Druids.SelectQueryBuilder.copy(this).context(newContext).build();
  }

  public SelectQuery withPagingSpec(PagingSpec pagingSpec)
  {
    return Druids.SelectQueryBuilder.copy(this).pagingSpec(pagingSpec).build();
  }

  public SelectQuery withDimFilter(DimFilter dimFilter)
  {
    return Druids.SelectQueryBuilder.copy(this).filters(dimFilter).build();
  }

  @Override
  public String toString()
  {
    return "SelectQuery{" +
           "dataSource='" + getDataSource() + '\'' +
           ", querySegmentSpec=" + getQuerySegmentSpec() +
           ", descending=" + isDescending() +
           ", dimFilter=" + dimFilter +
           ", granularity=" + granularity +
           ", dimensions=" + dimensions +
           ", metrics=" + metrics +
           ", virtualColumns=" + virtualColumns +
           ", pagingSpec=" + pagingSpec +
           '}';
  }

  @Override
  public boolean equals(Object o)
  {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    if (!super.equals(o)) {
      return false;
    }
    SelectQuery that = (SelectQuery) o;
    return Objects.equals(dimFilter, that.dimFilter)
           && Objects.equals(granularity, that.granularity)
           && Objects.equals(dimensions, that.dimensions)
           && Objects.equals(metrics, that.metrics)
           && Objects.equals(virtualColumns, that.virtualColumns)
           && Objects.equals(pagingSpec, that.pagingSpec);
  }

  @Override
  public int hashCode()
  {
    int result = super.hashCode();
    result = 31 * result + Objects.hash(dimFilter, granularity, dimensions, metrics, virtualColumns, pagingSpec);
    return result;
  }
}
| |
/*
* ConnectBot: simple, powerful, open-source SSH client for Android
* Copyright 2007 Kenny Root, Jeffrey Sharkey
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.iiordanov.bssh.transport;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.security.NoSuchAlgorithmException;
import java.security.PrivateKey;
import java.security.PublicKey;
import java.security.spec.InvalidKeySpecException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import com.iiordanov.bssh.R;
import com.iiordanov.bssh.bean.HostBean;
import com.iiordanov.bssh.bean.PortForwardBean;
import com.iiordanov.bssh.bean.PubkeyBean;
import com.iiordanov.bssh.service.TerminalBridge;
import com.iiordanov.bssh.service.TerminalManager;
import com.iiordanov.bssh.service.TerminalManager.KeyHolder;
import com.iiordanov.bssh.util.HostDatabase;
import com.iiordanov.bssh.util.PubkeyDatabase;
import com.iiordanov.bssh.util.PubkeyUtils;
import android.content.Context;
import android.net.Uri;
import android.os.Environment;
import android.util.Log;
import com.trilead.ssh2.AuthAgentCallback;
import com.trilead.ssh2.ChannelCondition;
import com.trilead.ssh2.Connection;
import com.trilead.ssh2.ConnectionInfo;
import com.trilead.ssh2.ConnectionMonitor;
import com.trilead.ssh2.DynamicPortForwarder;
import com.trilead.ssh2.InteractiveCallback;
import com.trilead.ssh2.KnownHosts;
import com.trilead.ssh2.LocalPortForwarder;
import com.trilead.ssh2.SCPClient;
import com.trilead.ssh2.ServerHostKeyVerifier;
import com.trilead.ssh2.Session;
import com.trilead.ssh2.crypto.PEMDecoder;
import com.trilead.ssh2.signature.DSAPrivateKey;
import com.trilead.ssh2.signature.DSAPublicKey;
import com.trilead.ssh2.signature.DSASHA1Verify;
import com.trilead.ssh2.signature.RSAPrivateKey;
import com.trilead.ssh2.signature.RSAPublicKey;
import com.trilead.ssh2.signature.RSASHA1Verify;
/**
* @author Kenny Root
*
*/
public class SSH extends AbsTransport implements ConnectionMonitor, InteractiveCallback, AuthAgentCallback {
	/** Creates an unconnected transport; host/bridge/manager are supplied later. */
	public SSH() {
		super();
	}

	/**
	 * Creates a transport bound to a specific host.
	 *
	 * @param host host definition (address, port, username, auth settings)
	 * @param bridge terminal bridge used for output lines and user prompts
	 * @param manager owning terminal manager (host/pubkey databases, loaded keys, resources)
	 */
	public SSH(HostBean host, TerminalBridge bridge, TerminalManager manager) {
		super(host, bridge, manager);
	}
	private static final String PROTOCOL = "ssh";
	private static final String TAG = "ConnectBot.SSH";
	private static final int DEFAULT_PORT = 22;

	// SSH authentication method names as advertised by the server
	private static final String AUTH_PUBLICKEY = "publickey",
		AUTH_PASSWORD = "password",
		AUTH_KEYBOARDINTERACTIVE = "keyboard-interactive";

	private final static int AUTH_TRIES = 20;

	// Matches "user@host" or "user@host:port" URIs (case-insensitive host)
	static final Pattern hostmask;
	static {
		hostmask = Pattern.compile("^(.+)@([0-9a-z.-]+)(:(\\d+))?$", Pattern.CASE_INSENSITIVE);
	}

	private boolean compression = false;
	// volatile: read/written from connection and UI threads
	private volatile boolean authenticated = false;
	private volatile boolean connected = false;
	private volatile boolean sessionOpen = false;

	// set after public-key auth has been attempted so we fall through to other methods
	private boolean pubkeysExhausted = false;
	private boolean interactiveCanContinue = true;

	private Connection connection;
	private Session session;
	private ConnectionInfo connectionInfo;

	// streams of the remote shell session
	private OutputStream stdin;
	private InputStream stdout;
	private InputStream stderr;

	// channel conditions we wait on when pumping session data
	private static final int conditions = ChannelCondition.STDOUT_DATA
		| ChannelCondition.STDERR_DATA
		| ChannelCondition.CLOSED
		| ChannelCondition.EOF;

	private List<PortForwardBean> portForwards = new LinkedList<PortForwardBean>();

	// current terminal geometry (characters and pixels)
	private int columns;
	private int rows;

	private int width;
	private int height;

	private String useAuthAgent = HostDatabase.AUTHAGENT_NO;
	private String agentLockPassphrase;
public class HostKeyVerifier implements ServerHostKeyVerifier {
public boolean verifyServerHostKey(String hostname, int port,
String serverHostKeyAlgorithm, byte[] serverHostKey) throws IOException {
// read in all known hosts from hostdb
KnownHosts hosts = manager.hostdb.getKnownHosts();
Boolean result;
String matchName = String.format("%s:%d", hostname, port);
String fingerprint = KnownHosts.createHexFingerprint(serverHostKeyAlgorithm, serverHostKey);
String algorithmName;
if ("ssh-rsa".equals(serverHostKeyAlgorithm))
algorithmName = "RSA";
else if ("ssh-dss".equals(serverHostKeyAlgorithm))
algorithmName = "DSA";
else
algorithmName = serverHostKeyAlgorithm;
switch(hosts.verifyHostkey(matchName, serverHostKeyAlgorithm, serverHostKey)) {
case KnownHosts.HOSTKEY_IS_OK:
bridge.outputLine(manager.res.getString(R.string.terminal_sucess, algorithmName, fingerprint));
return true;
case KnownHosts.HOSTKEY_IS_NEW:
// prompt user
bridge.outputLine(manager.res.getString(R.string.host_authenticity_warning, hostname));
bridge.outputLine(manager.res.getString(R.string.host_fingerprint, algorithmName, fingerprint));
result = bridge.promptHelper.requestBooleanPrompt(null, manager.res.getString(R.string.prompt_continue_connecting));
if(result == null) return false;
if(result.booleanValue()) {
// save this key in known database
manager.hostdb.saveKnownHost(hostname, port, serverHostKeyAlgorithm, serverHostKey);
}
return result.booleanValue();
case KnownHosts.HOSTKEY_HAS_CHANGED:
String header = String.format("@ %s @",
manager.res.getString(R.string.host_verification_failure_warning_header));
char[] atsigns = new char[header.length()];
Arrays.fill(atsigns, '@');
String border = new String(atsigns);
bridge.outputLine(border);
bridge.outputLine(manager.res.getString(R.string.host_verification_failure_warning));
bridge.outputLine(border);
bridge.outputLine(String.format(manager.res.getString(R.string.host_fingerprint),
algorithmName, fingerprint));
// Users have no way to delete keys, so we'll prompt them for now.
result = bridge.promptHelper.requestBooleanPrompt(null, manager.res.getString(R.string.prompt_continue_connecting));
if(result == null) return false;
if(result.booleanValue()) {
// save this key in known database
manager.hostdb.saveKnownHost(hostname, port, serverHostKeyAlgorithm, serverHostKey);
}
return result.booleanValue();
default:
return false;
}
}
}
	/**
	 * Attempts authentication against the connected host, trying methods in
	 * order: "none", public key, keyboard-interactive, then password. On
	 * success, finishConnection() is called to open the session. The method
	 * returns without throwing on failure; callers retry (up to AUTH_TRIES).
	 */
	private void authenticate() {
		// Best-effort probe: some servers accept "none" auth outright.
		try {
			if (connection.authenticateWithNone(host.getUsername())) {
				finishConnection();
				return;
			}
		} catch(Exception e) {
			// failure here only means "none" auth is unsupported; fall through
			Log.d(TAG, "Host does not support 'none' authentication.");
		}
		bridge.outputLine(manager.res.getString(R.string.terminal_auth));
		try {
			long pubkeyId = host.getPubkeyId();
			if (!pubkeysExhausted &&
					pubkeyId != HostDatabase.PUBKEYID_NEVER &&
					connection.isAuthMethodAvailable(host.getUsername(), AUTH_PUBLICKEY)) {
				// if explicit pubkey defined for this host, then prompt for password as needed
				// otherwise just try all in-memory keys held in terminalmanager
				if (pubkeyId == HostDatabase.PUBKEYID_ANY) {
					// try each of the in-memory keys
					bridge.outputLine(manager.res
							.getString(R.string.terminal_auth_pubkey_any));
					for (Entry<String, KeyHolder> entry : manager.loadedKeypairs.entrySet()) {
						// keys flagged confirm-use require per-use approval
						if (entry.getValue().bean.isConfirmUse()
								&& !promptForPubkeyUse(entry.getKey()))
							continue;
						if (this.tryPublicKey(host.getUsername(), entry.getKey(),
								entry.getValue().trileadKey)) {
							finishConnection();
							break;
						}
					}
				} else {
					bridge.outputLine(manager.res.getString(R.string.terminal_auth_pubkey_specific));
					// use a specific key for this host, as requested
					PubkeyBean pubkey = manager.pubkeydb.findPubkeyById(pubkeyId);
					if (pubkey == null)
						bridge.outputLine(manager.res.getString(R.string.terminal_auth_pubkey_invalid));
					else
						if (tryPublicKey(pubkey))
							finishConnection();
				}
				// mark pubkey auth as tried so the next pass falls through to
				// keyboard-interactive / password
				pubkeysExhausted = true;
			} else if (interactiveCanContinue &&
					connection.isAuthMethodAvailable(host.getUsername(), AUTH_KEYBOARDINTERACTIVE)) {
				// this auth method will talk with us using InteractiveCallback interface
				// it blocks until authentication finishes
				bridge.outputLine(manager.res.getString(R.string.terminal_auth_ki));
				interactiveCanContinue = false;
				if(connection.authenticateWithKeyboardInteractive(host.getUsername(), this)) {
					finishConnection();
				} else {
					bridge.outputLine(manager.res.getString(R.string.terminal_auth_ki_fail));
				}
			} else if (connection.isAuthMethodAvailable(host.getUsername(), AUTH_PASSWORD)) {
				bridge.outputLine(manager.res.getString(R.string.terminal_auth_pass));
				// blocks until the user answers (or cancels) the password prompt
				String password = bridge.getPromptHelper().requestPasswordPrompt(null,
						manager.res.getString(R.string.prompt_password));
				if (password != null
						&& connection.authenticateWithPassword(host.getUsername(), password)) {
					finishConnection();
				} else {
					bridge.outputLine(manager.res.getString(R.string.terminal_auth_pass_fail));
				}
			} else {
				// no mutually supported authentication method remains
				bridge.outputLine(manager.res.getString(R.string.terminal_auth_fail));
			}
		} catch (IllegalStateException e) {
			Log.e(TAG, "Connection went away while we were trying to authenticate", e);
			return;
		} catch(Exception e) {
			Log.e(TAG, "Problem during handleAuthentication()", e);
		}
	}
/**
 * Attempt 'publickey' authentication with the given stored key, unlocking
 * it (prompting for its passphrase when encrypted) and caching the
 * unlocked key in memory on success.
 *
 * @param pubkey stored key definition loaded from the pubkey database
 * @return true for successful authentication
 * @throws NoSuchAlgorithmException
 * @throws InvalidKeySpecException
 * @throws IOException
 */
private boolean tryPublicKey(PubkeyBean pubkey) throws NoSuchAlgorithmException, InvalidKeySpecException, IOException {
    Object trileadKey = null;
    if(manager.isKeyLoaded(pubkey.getNickname())) {
        // load this key from memory if its already there
        Log.d(TAG, String.format("Found unlocked key '%s' already in-memory", pubkey.getNickname()));

        if (pubkey.isConfirmUse()) {
            if (!promptForPubkeyUse(pubkey.getNickname()))
                return false;
        }

        trileadKey = manager.getKey(pubkey.getNickname());
    } else {
        // otherwise load key from database and prompt for password as needed
        String password = null;
        if (pubkey.isEncrypted()) {
            password = bridge.getPromptHelper().requestPasswordPrompt(null,
                    manager.res.getString(R.string.prompt_pubkey_password, pubkey.getNickname()));

            // Something must have interrupted the prompt.
            if (password == null)
                return false;
        }

        if(PubkeyDatabase.KEY_TYPE_IMPORTED.equals(pubkey.getType())) {
            // load specific key using pem format
            trileadKey = PEMDecoder.decode(new String(pubkey.getPrivateKey()).toCharArray(), password);
        } else {
            // load using internal generated format
            PrivateKey privKey;
            try {
                privKey = PubkeyUtils.decodePrivate(pubkey.getPrivateKey(),
                        pubkey.getType(), password);
            } catch (Exception e) {
                String message = String.format("Bad password for key '%s'. Authentication failed.", pubkey.getNickname());
                Log.e(TAG, message, e);
                bridge.outputLine(message);
                return false;
            }

            PublicKey pubKey = pubkey.getPublicKey();

            // convert key to trilead format
            trileadKey = PubkeyUtils.convertToTrilead(privKey, pubKey);
            Log.d(TAG, "Unlocked key " + PubkeyUtils.formatKey(pubKey));
        }

        Log.d(TAG, String.format("Unlocked key '%s'", pubkey.getNickname()));

        // save this key in memory
        manager.addKey(pubkey, trileadKey);
    }

    return tryPublicKey(host.getUsername(), pubkey.getNickname(), trileadKey);
}
/**
 * Perform the actual 'publickey' authentication attempt with an already
 * unlocked key, reporting a failure line to the terminal when rejected.
 */
private boolean tryPublicKey(String username, String keyNickname, Object trileadKey) throws IOException {
    final boolean accepted = connection.authenticateWithPublicKey(username, trileadKey);
    if (!accepted) {
        // Tell the user which key the server refused.
        bridge.outputLine(manager.res.getString(R.string.terminal_auth_pubkey_fail, keyNickname));
    }
    return accepted;
}
/**
 * Internal method to request actual PTY terminal once we've finished
 * authentication. If called before authenticated, it will just fail.
 * Also brings up any configured port forwards and, when the host wants a
 * shell session, wires up stdin/stdout/stderr streams.
 */
private void finishConnection() {
    authenticated = true;

    // Bring up saved port forwards; a failure on one does not stop the rest.
    for (PortForwardBean portForward : portForwards) {
        try {
            enablePortForward(portForward);
            bridge.outputLine(manager.res.getString(R.string.terminal_enable_portfoward, portForward.getDescription()));
        } catch (Exception e) {
            Log.e(TAG, "Error setting up port forward during connect", e);
        }
    }

    if (!host.getWantSession()) {
        // Host is configured for forwarding only; no shell session needed.
        bridge.outputLine(manager.res.getString(R.string.terminal_no_session));
        bridge.onConnected();
        return;
    }

    try {
        session = connection.openSession();

        if (!useAuthAgent.equals(HostDatabase.AUTHAGENT_NO))
            session.requestAuthAgentForwarding(this);

        session.requestPTY(getEmulation(), columns, rows, width, height, null);
        session.startShell();

        stdin = session.getStdin();
        stdout = session.getStdout();
        stderr = session.getStderr();

        sessionOpen = true;

        bridge.onConnected();
    } catch (IOException e1) {
        Log.e(TAG, "Problem while trying to create PTY in finishConnection()", e1);
    }
}
/**
 * Open the SSH transport connection, verify the host key, report the
 * negotiated algorithms, and loop on authentication attempts until
 * complete or AUTH_TRIES attempts have been made.
 */
@Override
public void connect() {
    connection = new Connection(host.getHostname(), host.getPort());
    connection.addConnectionMonitor(this);

    try {
        connection.setCompression(compression);
    } catch (IOException e) {
        Log.e(TAG, "Could not enable compression!", e);
    }

    try {
        /* Uncomment when debugging SSH protocol:
        DebugLogger logger = new DebugLogger() {
            public void log(int level, String className, String message) {
                Log.d("SSH", message);
            }
        };
        Logger.enabled = true;
        Logger.logger = logger;
        */
        connectionInfo = connection.connect(new HostKeyVerifier());
        connected = true;

        // One line when both directions use the same algorithms, otherwise
        // one line per direction.
        if (connectionInfo.clientToServerCryptoAlgorithm
                .equals(connectionInfo.serverToClientCryptoAlgorithm)
                && connectionInfo.clientToServerMACAlgorithm
                .equals(connectionInfo.serverToClientMACAlgorithm)) {
            bridge.outputLine(manager.res.getString(R.string.terminal_using_algorithm,
                    connectionInfo.clientToServerCryptoAlgorithm,
                    connectionInfo.clientToServerMACAlgorithm));
        } else {
            bridge.outputLine(manager.res.getString(
                    R.string.terminal_using_c2s_algorithm,
                    connectionInfo.clientToServerCryptoAlgorithm,
                    connectionInfo.clientToServerMACAlgorithm));

            bridge.outputLine(manager.res.getString(
                    R.string.terminal_using_s2c_algorithm,
                    connectionInfo.serverToClientCryptoAlgorithm,
                    connectionInfo.serverToClientMACAlgorithm));
        }
    } catch (IOException e) {
        Log.e(TAG, "Problem in SSH connection thread during authentication", e);

        // Display the reason in the text. The IOException may have no cause,
        // so fall back to the exception itself instead of dereferencing a
        // null cause (which previously threw a NullPointerException here and
        // masked the real connection error).
        Throwable cause = e.getCause();
        bridge.outputLine((cause != null ? cause : e).getMessage());

        onDisconnect();
        return;
    }

    try {
        // enter a loop to keep trying until authentication
        int tries = 0;
        while (connected && !connection.isAuthenticationComplete() && tries++ < AUTH_TRIES) {
            authenticate();

            // sleep to make sure we dont kill system
            Thread.sleep(1000);
        }
    } catch (InterruptedException e) {
        // Restore the interrupt status so the owning thread can observe it.
        Thread.currentThread().interrupt();
        Log.e(TAG, "Interrupted between authentication attempts", e);
    } catch(Exception e) {
        Log.e(TAG, "Problem in SSH connection thread during authentication", e);
    }
}
/**
 * Tear down the shell session and the underlying SSH connection,
 * clearing both references so they cannot be reused.
 */
@Override
public void close() {
    connected = false;

    final Session openSession = session;
    if (openSession != null) {
        session = null;
        openSession.close();
    }

    final Connection openConnection = connection;
    if (openConnection != null) {
        connection = null;
        openConnection.close();
    }
}
/** Tear down the connection and notify the bridge of an unplanned disconnect. */
private void onDisconnect() {
    close();
    bridge.dispatchDisconnect(false);
}
/** Flush pending input to the remote shell; no-op before the session opens. */
@Override
public void flush() throws IOException {
    if (stdin == null)
        return;
    stdin.flush();
}
/**
 * Read shell output into the buffer, blocking on the channel conditions.
 * STDERR data is drained and discarded; EOF triggers a disconnect.
 *
 * @return number of bytes copied into buffer (0 when no session is open)
 * @throws IOException when the remote end has closed the connection
 */
@Override
public int read(byte[] buffer, int start, int len) throws IOException {
    int bytesRead = 0;

    if (session == null)
        return 0;

    int newConditions = session.waitForCondition(conditions, 0);

    if ((newConditions & ChannelCondition.STDOUT_DATA) != 0) {
        bytesRead = stdout.read(buffer, start, len);
    }

    if ((newConditions & ChannelCondition.STDERR_DATA) != 0) {
        // Drain stderr in small chunks; its contents are not displayed.
        byte discard[] = new byte[256];
        while (stderr.available() > 0) {
            stderr.read(discard);
        }
    }

    if ((newConditions & ChannelCondition.EOF) != 0) {
        onDisconnect();
        throw new IOException("Remote end closed connection");
    }

    return bytesRead;
}
/** Send a buffer of bytes to the remote shell; no-op before the session opens. */
@Override
public void write(byte[] buffer) throws IOException {
    if (stdin == null)
        return;
    stdin.write(buffer);
}

/** Send a single byte to the remote shell; no-op before the session opens. */
@Override
public void write(int c) throws IOException {
    if (stdin == null)
        return;
    stdin.write(c);
}
/** @return transport options as a fresh map; currently only "compression". */
@Override
public Map<String, String> getOptions() {
    final Map<String, String> options = new HashMap<String, String>();
    options.put("compression", String.valueOf(compression));
    return options;
}
/** Apply transport options; only the "compression" key is recognized. */
@Override
public void setOptions(Map<String, String> options) {
    if (!options.containsKey("compression"))
        return;
    compression = Boolean.parseBoolean(options.get("compression"));
}
/** @return the static protocol identifier for this transport. */
public static String getProtocolName() {
    return PROTOCOL;
}

/** @return true once a shell session (PTY) has been opened. */
@Override
public boolean isSessionOpen() {
    return sessionOpen;
}

/** @return true once the transport-level SSH connection is established. */
@Override
public boolean isConnected() {
    return connected;
}

// Connection-monitor callback from the SSH library: any lost connection
// is treated as a full disconnect.
public void connectionLost(Throwable reason) {
    onDisconnect();
}

/** This transport supports port forwarding. */
@Override
public boolean canForwardPorts() {
    return true;
}

// NOTE(review): returns the live internal list, so callers can mutate it
// directly — confirm that is intended before changing to a copy.
@Override
public List<PortForwardBean> getPortForwards() {
    return portForwards;
}

/** Register a port forward; it is not active until enablePortForward(). */
@Override
public boolean addPortForward(PortForwardBean portForward) {
    return portForwards.add(portForward);
}

/** Disable and unregister a port forward. */
@Override
public boolean removePortForward(PortForwardBean portForward) {
    // Make sure we don't have a phantom forwarder.
    disablePortForward(portForward);
    return portForwards.remove(portForward);
}
/**
 * Activate a previously registered port forward. Requires that
 * authentication has completed; supports local, remote, and dynamic
 * (SOCKS5) forward types.
 *
 * @return true when the forwarder was created successfully
 */
@Override
public boolean enablePortForward(PortForwardBean portForward) {
    if (!portForwards.contains(portForward)) {
        Log.e(TAG, "Attempt to enable port forward not in list");
        return false;
    }

    if (!authenticated)
        return false;

    if (HostDatabase.PORTFORWARD_LOCAL.equals(portForward.getType())) {
        LocalPortForwarder lpf = null;
        try {
            // NOTE(review): binds to InetAddress.getLocalHost() rather than
            // the loopback address — confirm this is the intended interface.
            lpf = connection.createLocalPortForwarder(
                    new InetSocketAddress(InetAddress.getLocalHost(), portForward.getSourcePort()),
                    portForward.getDestAddr(), portForward.getDestPort());
        } catch (Exception e) {
            Log.e(TAG, "Could not create local port forward", e);
            return false;
        }

        if (lpf == null) {
            Log.e(TAG, "returned LocalPortForwarder object is null");
            return false;
        }

        // Stash the forwarder so disablePortForward() can close it later.
        portForward.setIdentifier(lpf);
        portForward.setEnabled(true);
        return true;
    } else if (HostDatabase.PORTFORWARD_REMOTE.equals(portForward.getType())) {
        try {
            connection.requestRemotePortForwarding("", portForward.getSourcePort(), portForward.getDestAddr(), portForward.getDestPort());
        } catch (Exception e) {
            Log.e(TAG, "Could not create remote port forward", e);
            return false;
        }

        portForward.setEnabled(true);
        return true;
    } else if (HostDatabase.PORTFORWARD_DYNAMIC5.equals(portForward.getType())) {
        DynamicPortForwarder dpf = null;

        try {
            dpf = connection.createDynamicPortForwarder(
                    new InetSocketAddress(InetAddress.getLocalHost(), portForward.getSourcePort()));
        } catch (Exception e) {
            Log.e(TAG, "Could not create dynamic port forward", e);
            return false;
        }

        // Stash the forwarder so disablePortForward() can close it later.
        portForward.setIdentifier(dpf);
        portForward.setEnabled(true);
        return true;
    } else {
        // Unsupported type
        Log.e(TAG, String.format("attempt to forward unknown type %s", portForward.getType()));
        return false;
    }
}
/**
 * Deactivate an enabled port forward, closing the forwarder that
 * enablePortForward() stashed in the bean's identifier.
 *
 * @return true when the forward was shut down cleanly
 */
@Override
public boolean disablePortForward(PortForwardBean portForward) {
    if (!portForwards.contains(portForward)) {
        Log.e(TAG, "Attempt to disable port forward not in list");
        return false;
    }

    if (!authenticated)
        return false;

    if (HostDatabase.PORTFORWARD_LOCAL.equals(portForward.getType())) {
        LocalPortForwarder lpf = null;
        lpf = (LocalPortForwarder)portForward.getIdentifier();

        if (!portForward.isEnabled() || lpf == null) {
            Log.d(TAG, String.format("Could not disable %s; it appears to be not enabled or have no handler", portForward.getNickname()));
            return false;
        }

        // Mark disabled first so the state is consistent even if close fails.
        portForward.setEnabled(false);

        try {
            lpf.close();
        } catch (IOException e) {
            Log.e(TAG, "Could not stop local port forwarder, setting enabled to false", e);
            return false;
        }

        return true;
    } else if (HostDatabase.PORTFORWARD_REMOTE.equals(portForward.getType())) {
        portForward.setEnabled(false);

        try {
            connection.cancelRemotePortForwarding(portForward.getSourcePort());
        } catch (IOException e) {
            Log.e(TAG, "Could not stop remote port forwarding, setting enabled to false", e);
            return false;
        }

        return true;
    } else if (HostDatabase.PORTFORWARD_DYNAMIC5.equals(portForward.getType())) {
        DynamicPortForwarder dpf = null;
        dpf = (DynamicPortForwarder)portForward.getIdentifier();

        if (!portForward.isEnabled() || dpf == null) {
            Log.d(TAG, String.format("Could not disable %s; it appears to be not enabled or have no handler", portForward.getNickname()));
            return false;
        }

        portForward.setEnabled(false);

        try {
            dpf.close();
        } catch (IOException e) {
            Log.e(TAG, "Could not stop dynamic port forwarder, setting enabled to false", e);
            return false;
        }

        return true;
    } else {
        // Unsupported type
        Log.e(TAG, String.format("attempt to forward unknown type %s", portForward.getType()));
        return false;
    }
}
/** SCP-based file transfer is supported over this transport. */
@Override
public boolean canTransferFiles() {
    return true;
}
/**
 * Download a remote file via SCP into the given local folder, creating
 * the folder if needed. A null or empty folder falls back to external
 * storage.
 *
 * @param remoteFile path of the file on the remote host
 * @param localFolder destination directory, or null/empty for the default
 * @return true on success, false when the transfer failed
 */
@Override
public boolean downloadFile(String remoteFile, String localFolder) {
    try {
        SCPClient client = new SCPClient(connection);

        // Fixed: the original compared with == "", which is reference
        // identity and almost never true; use isEmpty() for content.
        if (localFolder == null || localFolder.isEmpty())
            localFolder = Environment.getExternalStorageDirectory().getAbsolutePath();

        File dir = new File(localFolder);
        dir.mkdirs();

        client.get(remoteFile, localFolder);

        return true;
    } catch (IOException e) {
        Log.e(TAG, "Could not download remote file", e);
        return false;
    }
}
/**
 * Upload a local file via SCP. When no remote file name is supplied the
 * local name is kept; a null remote folder defaults to the SCP default.
 *
 * @param localFile path of the file to send
 * @param remoteFile target file name, or null/empty to keep the local name
 * @param remoteFolder target directory, or null for the default
 * @param mode permission mode string passed through to SCP
 * @return true on success, false when the transfer failed
 */
@Override
public boolean uploadFile(String localFile, String remoteFile,
        String remoteFolder, String mode) {
    try {
        SCPClient client = new SCPClient(connection);

        if (remoteFolder == null)
            remoteFolder = "";

        // Fixed: the original compared with == "", which is reference
        // identity and almost never true; use isEmpty() for content.
        if (remoteFile == null || remoteFile.isEmpty())
            client.put(localFile, remoteFolder, mode);
        else
            client.put(localFile, remoteFile, remoteFolder, mode);

        return true;
    } catch (IOException e) {
        Log.e(TAG, "Could not upload local file", e);
        return false;
    }
}
/**
 * Record the new terminal geometry and, when a session is open, push a
 * resize-PTY request to the server.
 */
@Override
public void setDimensions(int columns, int rows, int width, int height) {
    // NOTE(review): width/height are forwarded to resizePTY but not stored
    // in the corresponding fields here — confirm whether that is intended.
    this.columns = columns;
    this.rows = rows;

    if (sessionOpen) {
        try {
            session.resizePTY(columns, rows, width, height);
        } catch (IOException e) {
            Log.e(TAG, "Couldn't send resize PTY packet", e);
        }
    }
}
/** @return the protocol's default port constant, used when none is given. */
@Override
public int getDefaultPort() {
    return DEFAULT_PORT;
}
/**
 * Build the default host nickname, omitting the port suffix when it is
 * the protocol's default port.
 */
@Override
public String getDefaultNickname(String username, String hostname, int port) {
    return (port == DEFAULT_PORT)
            ? String.format("%s@%s", username, hostname)
            : String.format("%s@%s:%d", username, hostname, port);
}
/**
 * Parse "user@host[:port]"-style input into a protocol Uri, or return
 * null when the input does not match the hostmask pattern. Out-of-range
 * or unparseable ports fall back to the default port, which is then
 * omitted from the Uri. The raw input is preserved in the fragment.
 */
public static Uri getUri(String input) {
    Matcher matcher = hostmask.matcher(input);

    if (!matcher.matches())
        return null;

    StringBuilder sb = new StringBuilder();

    sb.append(PROTOCOL)
        .append("://")
        .append(Uri.encode(matcher.group(1)))
        .append('@')
        .append(matcher.group(2));

    String portString = matcher.group(4);
    int port = DEFAULT_PORT;
    if (portString != null) {
        try {
            port = Integer.parseInt(portString);
            if (port < 1 || port > 65535) {
                port = DEFAULT_PORT;
            }
        } catch (NumberFormatException nfe) {
            // Keep the default port
        }
    }

    if (port != DEFAULT_PORT) {
        sb.append(':')
            .append(port);
    }

    sb.append("/#")
        .append(Uri.encode(input));

    Uri uri = Uri.parse(sb.toString());

    return uri;
}
/**
 * Handle challenges from keyboard-interactive authentication mode by
 * asking the user to answer each server-supplied prompt in turn.
 */
public String[] replyToChallenge(String name, String instruction, int numPrompts, String[] prompt, boolean[] echo) {
    // Mark that the server engaged the interactive dialogue, so the auth
    // loop may try this method again.
    interactiveCanContinue = true;

    final String[] responses = new String[numPrompts];
    int index = 0;
    while (index < numPrompts) {
        responses[index] = bridge.promptHelper.requestPasswordPrompt(instruction, prompt[index]);
        index++;
    }
    return responses;
}
/**
 * Build a HostBean from a parsed host Uri: hostname, port (defaulted when
 * absent), username, and a nickname taken from the fragment or
 * synthesized as "user@host[:port]".
 */
@Override
public HostBean createHost(Uri uri) {
    final HostBean host = new HostBean();

    host.setProtocol(PROTOCOL);
    host.setHostname(uri.getHost());

    int port = uri.getPort();
    if (port < 0)
        port = DEFAULT_PORT;
    host.setPort(port);

    host.setUsername(uri.getUserInfo());

    final String nickname = uri.getFragment();
    if (nickname != null && nickname.length() > 0) {
        host.setNickname(nickname);
    } else {
        // No fragment supplied: fall back to the synthesized default.
        host.setNickname(getDefaultNickname(host.getUsername(),
                host.getHostname(), host.getPort()));
    }

    return host;
}
/**
 * Fill the host-database selection map from a host Uri so the matching
 * saved host can be looked up. A missing port falls back to the default.
 */
@Override
public void getSelectionArgs(Uri uri, Map<String, String> selection) {
    selection.put(HostDatabase.FIELD_HOST_PROTOCOL, PROTOCOL);
    selection.put(HostDatabase.FIELD_HOST_NICKNAME, uri.getFragment());
    selection.put(HostDatabase.FIELD_HOST_HOSTNAME, uri.getHost());

    int port = uri.getPort();
    if (port < 0)
        port = DEFAULT_PORT;
    selection.put(HostDatabase.FIELD_HOST_PORT, Integer.toString(port));

    selection.put(HostDatabase.FIELD_HOST_USERNAME, uri.getUserInfo());
}
/** Remember whether to request transport-level compression on connect. */
@Override
public void setCompression(boolean compression) {
    this.compression = compression;
}

/** @return a human-readable "username@hostname:port" input format hint. */
public static String getFormatHint(Context context) {
    return String.format("%s@%s:%s",
            context.getString(R.string.format_username),
            context.getString(R.string.format_hostname),
            context.getString(R.string.format_port));
}

/** Set the auth-agent forwarding policy (one of HostDatabase.AUTHAGENT_*). */
@Override
public void setUseAuthAgent(String useAuthAgent) {
    this.useAuthAgent = useAuthAgent;
}
/**
 * Encode the public half of every loaded keypair for the agent protocol.
 * Keys that are neither RSA nor DSA, or that fail to encode, are skipped.
 *
 * @return map of key nickname to SSH wire-format public key blob
 */
public Map<String,byte[]> retrieveIdentities() {
    Map<String,byte[]> pubKeys = new HashMap<String,byte[]>(manager.loadedKeypairs.size());

    for (Entry<String,KeyHolder> entry : manager.loadedKeypairs.entrySet()) {
        Object trileadKey = entry.getValue().trileadKey;

        try {
            if (trileadKey instanceof RSAPrivateKey) {
                RSAPublicKey pubkey = ((RSAPrivateKey) trileadKey).getPublicKey();
                pubKeys.put(entry.getKey(), RSASHA1Verify.encodeSSHRSAPublicKey(pubkey));
            } else if (trileadKey instanceof DSAPrivateKey) {
                DSAPublicKey pubkey = ((DSAPrivateKey) trileadKey).getPublicKey();
                pubKeys.put(entry.getKey(), DSASHA1Verify.encodeSSHDSAPublicKey(pubkey));
            } else
                continue;
        } catch (IOException e) {
            // Skip keys that fail to encode rather than aborting the listing.
            continue;
        }
    }

    return pubKeys;
}
/**
 * Agent callback: look up the in-memory private key matching a public key
 * blob, honoring the auth-agent policy and any per-key confirmation.
 *
 * @param publicKey SSH wire-format public key blob from the agent request
 * @return the matching key object, or null when unknown or denied
 */
public Object getPrivateKey(byte[] publicKey) {
    String nickname = manager.getKeyNickname(publicKey);

    if (nickname == null)
        return null;

    if (useAuthAgent.equals(HostDatabase.AUTHAGENT_NO)) {
        // Fixed: previously logged an empty message, hiding why the key
        // request was refused.
        Log.e(TAG, "Agent key requested while agent forwarding is disabled");
        return null;
    } else if (useAuthAgent.equals(HostDatabase.AUTHAGENT_CONFIRM) ||
            manager.loadedKeypairs.get(nickname).bean.isConfirmUse()) {
        if (!promptForPubkeyUse(nickname))
            return null;
    }
    return manager.getKey(nickname);
}
/**
 * Ask the user whether the agent may use the named key.
 *
 * @return true only when the user explicitly allowed the use
 */
private boolean promptForPubkeyUse(String nickname) {
    Boolean result = bridge.promptHelper.requestBooleanPrompt(null,
            manager.res.getString(R.string.prompt_allow_agent_to_use_key,
                    nickname));
    // The prompt can be interrupted and return null; treat that as a
    // denial instead of throwing a NullPointerException on auto-unboxing.
    return result != null && result.booleanValue();
}
/**
 * Agent callback: add a key supplied by the remote side to the in-memory
 * key store. The key is not persisted to the pubkey database (the setType
 * call is commented out), so it lives only as long as the manager.
 */
public boolean addIdentity(Object key, String comment, boolean confirmUse, int lifetime) {
    PubkeyBean pubkey = new PubkeyBean();
//        pubkey.setType(PubkeyDatabase.KEY_TYPE_IMPORTED);
    pubkey.setNickname(comment);
    pubkey.setConfirmUse(confirmUse);
    pubkey.setLifetime(lifetime);
    manager.addKey(pubkey, key);
    return true;
}

/** Agent callback: drop every loaded keypair from memory. Always succeeds. */
public boolean removeAllIdentities() {
    manager.loadedKeypairs.clear();
    return true;
}

/** Agent callback: drop the loaded keypair matching the public key blob. */
public boolean removeIdentity(byte[] publicKey) {
    return manager.removeKey(publicKey);
}

/** @return true while an agent lock passphrase is set. */
public boolean isAgentLocked() {
    return agentLockPassphrase != null;
}
/**
 * Clear the agent lock when the supplied passphrase matches.
 *
 * @return true when the agent ends up unlocked by this call
 */
public boolean requestAgentUnlock(String unlockPassphrase) {
    if (agentLockPassphrase == null)
        return false;

    // Only a matching passphrase clears the lock.
    if (agentLockPassphrase.equals(unlockPassphrase)) {
        agentLockPassphrase = null;
        return true;
    }

    return false;
}
/**
 * Lock the agent with a passphrase; fails when it is already locked.
 *
 * @return true when the lock was applied
 */
public boolean setAgentLock(String lockPassphrase) {
    final boolean alreadyLocked = (agentLockPassphrase != null);
    if (!alreadyLocked)
        agentLockPassphrase = lockPassphrase;
    return !alreadyLocked;
}
/**
 * This transport always requires network connectivity.
 *
 * @see com.iiordanov.bssh.transport.AbsTransport#usesNetwork()
 */
@Override
public boolean usesNetwork() {
    return true;
}
}
| |
/*
* Copyright 2008 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import com.google.javascript.jscomp.CheckLevel;
/**
 * Tests for {@link CheckMissingReturn}.
 *
 * <p>Each case wraps a function body (see {@link #createFunction}) in a
 * function annotated with a given return type and asserts whether the
 * missing-return diagnostic fires.
 */
public class CheckMissingReturnTest extends CompilerTestCase {

    public CheckMissingReturnTest() {
        // Type checking itself is exercised elsewhere; here only the
        // control-flow-based missing-return check is under test.
        enableTypeCheck(CheckLevel.OFF);
    }

    @Override
    protected CompilerPass getProcessor(final Compiler compiler) {
        return new CombinedCompilerPass(compiler,
                new CheckMissingReturn(compiler, CheckLevel.ERROR));
    }

    public void testMissingReturn() {
        // Requires control flow analysis.
        testMissing("if (a) { return 1; }");

        // Switch statement.
        testMissing("switch(1) { case 12: return 5; }");

        // Test try catch finally.
        testMissing("try { foo() } catch (e) { return 5; } finally { }");

        // Nested scope.
        testMissing("/** @return {number} */ function f() { var x; }; return 1;");
        testMissing("/** @return {number} */ function f() { return 1; };");
    }

    public void testReturnNotMissing() {
        // Empty function body. Ignore this case. The remainder of the functions in
        // this test have non-empty bodies.
        testNotMissing("");

        // Simple cases.
        testSame("function f() { var x; }");
        testNotMissing("return 1;");

        // Returning void and undefined.
        testNotMissing("void", "var x;");
        testNotMissing("undefined", "var x;");

        // Returning a union that includes void or undefined.
        testNotMissing("number|undefined", "var x;");
        testNotMissing("number|void", "var x;");
        testNotMissing("(number,void)", "var x;");
        testNotMissing("(number,undefined)", "var x;");
        testNotMissing("*", "var x;");

        // Test try catch finally.
        testNotMissing("try { return foo() } catch (e) { } finally { }");

        // Nested function.
        testNotMissing(
            "/** @return {number} */ function f() { return 1; }; return 1;");

        // Strange tests that come up when reviewing closure code.
        testNotMissing("try { return 12; } finally { return 62; }");
        testNotMissing("try { } finally { return 1; }");
        testNotMissing("switch(1) { default: return 1; }");
        testNotMissing("switch(g) { case 1: return 1; default: return 2; }");
    }

    public void testFinallyStatements() {
        // The control flow analysis (CFA) treats finally blocks somewhat strangely.
        // The CFA might indicate that a finally block implicitly returns. However,
        // if entry into the finally block is normally caused by an explicit return
        // statement, then a return statement isn't missing:
        //
        // try {
        //   return 1;
        // } finally {
        //   // CFA determines implicit return. However, return not missing
        //   // because of return statement in try block.
        // }
        //
        // Hence extra tests are warranted for various cases involving finally
        // blocks.

        // Simple finally case.
        testNotMissing("try { return 1; } finally { }");
        testNotMissing("try { } finally { return 1; }");
        testMissing("try { } finally { }");

        // Cycles in the CFG within the finally block were causing problems before.
        testNotMissing("try { return 1; } finally { while (true) { } }");
        testMissing("try { } finally { while (x) { } }");
        testMissing("try { } finally { while (x) { if (x) { break; } } }");
        testNotMissing(
            "try { return 2; } finally { while (x) { if (x) { break; } } }");

        // Test various cases with nested try statements.
        testMissing("try { } finally { try { } finally { } }");
        testNotMissing("try { } finally { try { return 1; } finally { } }");
        testNotMissing("try { return 1; } finally { try { } finally { } }");

        // Calling a function potentially causes control flow to transfer to finally
        // block. However, the function will not return in this case as the
        // exception will unwind the stack. Hence this function isn't missing a
        // return statement (i.e., the running program will not expect a return
        // value from the function if an exception is thrown).
        testNotMissing("try { g(); return 1; } finally { }");

        // Closures within try ... finally affect missing return statement analysis
        // because of the nested scopes. The following tests check for missing
        // return statements in the three possible configurations: both scopes
        // return; enclosed doesn't return; enclosing doesn't return.
        testNotMissing(
            "try {" +
            "  /** @return {number} */ function f() {" +
            "    try { return 1; }" +
            "    finally { }" +
            "  };" +
            "  return 1;" +
            "}" +
            "finally { }");
        testMissing(
            "try {" +
            "  /** @return {number} */ function f() {" +
            "    try { }" +
            "    finally { }" +
            "  };" +
            "  return 1;" +
            "}" +
            "finally { }");
        testMissing(
            "try {" +
            "  /** @return {number} */ function f() {" +
            "    try { return 1; }" +
            "    finally { }" +
            "  };" +
            "}" +
            "finally { }");
    }

    public void testKnownConditions() {
        testNotMissing("if (true) return 1");
        testMissing("if (true) {} else {return 1}");

        testMissing("if (false) return 1");
        testNotMissing("if (false) {} else {return 1}");

        testNotMissing("if (1) return 1");
        testMissing("if (1) {} else {return 1}");

        testMissing("if (0) return 1");
        testNotMissing("if (0) {} else {return 1}");

        testNotMissing("if (3) return 1");
        testMissing("if (3) {} else {return 1}");
    }

    public void testKnownWhileLoop() {
        testNotMissing("while (1) return 1");
        testNotMissing("while (1) { if (x) {return 1} else {return 1}}");
        testNotMissing("while (0) {} return 1");

        // TODO(user): The current algorithm will not detect this case. It is
        // still computable in most cases.
        testNotMissing("while (1) {} return 0");
        testMissing("while (false) return 1");

        // Not known.
        testMissing("while(x) { return 1 }");
    }

    public void testMultiConditions() {
        testMissing("if (a) { } else { while (1) {return 1} }");
        testNotMissing("if (a) { return 1} else { while (1) {return 1} }");
    }

    public void testIssue779() {
        testNotMissing(
            "var a = f(); try { alert(); if (a > 0) return 1; }" +
            "finally { a = 5; } return 2;");
    }

    public void testConstructors() {
        testSame("/** @constructor */ function foo() {} ");

        // Renamed from ConstructorWithReturn: local variables use
        // lowerCamelCase per Java naming conventions.
        final String constructorWithReturn = "/** @constructor \n" +
            " * @return {!foo} */ function foo() {" +
            " if (!(this instanceof foo)) { return new foo; } }";
        testSame(constructorWithReturn);
    }

    /** Wraps a body in a function annotated with the given return type. */
    private static String createFunction(String returnType, String body) {
        return "/** @return {" + returnType + "} */ function foo() {" + body + "}";
    }

    /** Asserts the missing-return diagnostic fires for the wrapped body. */
    private void testMissing(String returnType, String body) {
        String js = createFunction(returnType, body);
        test(js, js, CheckMissingReturn.MISSING_RETURN_STATEMENT);
    }

    /** Asserts the missing-return diagnostic does not fire for the wrapped body. */
    private void testNotMissing(String returnType, String body) {
        testSame(createFunction(returnType, body));
    }

    /** Creates function with return type {number} */
    private void testNotMissing(String body) {
        testNotMissing("number", body);
    }

    /** Creates function with return type {number} */
    private void testMissing(String body) {
        testMissing("number", body);
    }
}
| |
/**
* Copyright (C) 2012 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jbpm.formModeler.core.processing.fieldHandlers.multipleSubform;
import org.apache.commons.lang3.ArrayUtils;
import org.jbpm.formModeler.core.processing.fieldHandlers.subform.utils.SubFormHelper;
import org.jbpm.formModeler.service.bb.mvc.components.handling.BeanHandler;
import org.slf4j.Logger;
import org.jbpm.formModeler.api.model.Field;
import org.jbpm.formModeler.api.model.Form;
import org.jbpm.formModeler.api.model.wrappers.I18nSet;
import org.jbpm.formModeler.core.FieldHandlersManager;
import org.jbpm.formModeler.core.processing.*;
import org.jbpm.formModeler.core.processing.formProcessing.NamespaceManager;
import org.jbpm.formModeler.core.processing.formStatus.FormStatus;
import org.jbpm.formModeler.core.processing.formStatus.FormStatusManager;
import org.jbpm.formModeler.core.rendering.SubformFinderService;
import org.jbpm.formModeler.service.bb.mvc.controller.CommandRequest;
import org.slf4j.LoggerFactory;
import javax.enterprise.context.ApplicationScoped;
import javax.inject.Inject;
import javax.inject.Named;
import java.util.*;
@ApplicationScoped
@Named("org.jbpm.formModeler.core.processing.fieldHandlers.multipleSubform.SubFormSendHandler")
public class SubFormSendHandler extends BeanHandler {
private Logger log = LoggerFactory.getLogger(SubFormSendHandler.class);
@Inject
private SubformFinderService subformFinderService;
@Inject
private SubFormHelper helper;
/** @return the shared NamespaceManager used to resolve form namespaces. */
public NamespaceManager getNamespaceManager() {
    return NamespaceManager.lookup();
}

/** @return the FormProcessor that reads and writes form status values. */
public FormProcessor getFormProcessor() {
    return FormProcessingServices.lookup().getFormProcessor();
}

/** @return the manager tracking per-form validation status. */
public FormStatusManager getFormStatusManager() {
    return FormStatusManager.lookup();
}

/** @return the registry of field handlers keyed by field type. */
public FieldHandlersManager getFieldHandlersManager() {
    return FormProcessingServices.lookup().getFieldHandlersManager();
}
/**
 * Toggle the expanded/collapsed state of a multiple-subform "create" form.
 * Scans the request parameters for the first one ending in the custom
 * namespace separator plus "expand" with an explicit boolean value, then
 * marks the corresponding field expanded or collapsed and clears stale
 * validation state.
 */
public void actionExpandSubform(CommandRequest request) {
    log.debug("Expanding subform");
    Enumeration parameterNames = request.getRequestObject().getParameterNames();
    while (parameterNames.hasMoreElements()) {
        String parameterName = (String) parameterNames.nextElement();
        if (parameterName.endsWith(FormProcessor.CUSTOM_NAMESPACE_SEPARATOR + "expand")) {
            String parameterValue = request.getParameter(parameterName);
            // Only act on an explicit boolean value; anything else is ignored.
            if ("true".equals(parameterValue) || "false".equals(parameterValue)) {
                boolean expand = Boolean.valueOf(parameterValue).booleanValue();

                FormNamespaceData fsd = getNamespaceManager().getNamespace(parameterName);

                // Clear the child create form
                getFormProcessor().setValues(fsd.getForm(), fsd.getNamespace(), request.getRequestObject().getParameterMap(), request.getFilesByParamName());

                // Strip the trailing "<separator>create" suffix to recover
                // the field name in the parent form.
                String fieldName = fsd.getFieldNameInParent();
                fieldName = fieldName.substring(0, fieldName.length() - (FormProcessor.CUSTOM_NAMESPACE_SEPARATOR + "create").length());

                Field field = fsd.getForm().getField(fieldName);

                String inputName = getNamespaceManager().generateFieldNamesPace( fsd.getNamespace(), field );
                if (expand)
                    helper.setExpandedField( inputName, inputName );
                else {
                    helper.clearExpandedField( inputName );
                }

                // PFP : cleared error when the subform to expand is a required field.
                if (expand) {
                    FormStatus formStatus = getFormStatusManager().getFormStatus(fsd.getForm(), fsd.getNamespace());
                    if (formStatus != null)
                        formStatus.removeWrongField(fieldName);
                }

                FieldHandler handler = getFieldHandlersManager().getHandler(field.getFieldType());
                if (handler instanceof CreateDynamicObjectFieldHandler) {
                    CreateDynamicObjectFieldHandler fHandler = (CreateDynamicObjectFieldHandler) handler;
                    Form createForm = fHandler.getCreateForm(field, fsd.getNamespace());
                    String createFormNamespace = fsd.getNamespace() + FormProcessor.NAMESPACE_SEPARATOR + fsd.getForm().getId() + FormProcessor.NAMESPACE_SEPARATOR + fieldName + FormProcessor.CUSTOM_NAMESPACE_SEPARATOR + "create";
                    getFormProcessor().clear(createForm, createFormNamespace);
                    log.debug("Cleared subform status");
                } else {
                    log.error("Can't clear subform to unknown field type: " + handler.getClass().getName());
                }

                // Clear errors in parent
                getFormProcessor().clearFieldErrors(fsd.getForm(), fsd.getNamespace());
                break;
            }
        }
    }
}
/**
 * Handles the "add item" subform action: for every request namespace flagged with the
 * "&create" suffix, asks the field handler to perform the item addition and copies any
 * validation errors back onto the create form.
 */
public void actionAddItem(CommandRequest request) {
    log.debug("Adding item to subform");
    final String createSuffix = FormProcessor.CUSTOM_NAMESPACE_SEPARATOR + "create";
    Set namespaces = getFormNamespaceDatas(request, createSuffix);
    for (Object o : namespaces) {
        FormNamespaceData nsData = (FormNamespaceData) o;
        // Strip the "&create" suffix to recover the parent field name.
        String parentFieldName = nsData.getFieldNameInParent();
        parentFieldName = parentFieldName.substring(0, parentFieldName.length() - createSuffix.length());
        Field parentField = nsData.getForm().getField(parentFieldName);
        FieldHandler fieldHandler = getFieldHandlersManager().getHandler(parentField.getFieldType());
        if (!(fieldHandler instanceof CreateDynamicObjectFieldHandler)) {
            log.error("Can't add item to unknown field type: " + fieldHandler.getClass().getName());
            continue;
        }
        CreateDynamicObjectFieldHandler createHandler = (CreateDynamicObjectFieldHandler) fieldHandler;
        Form createForm = createHandler.getCreateForm(parentField, nsData.getNamespace());
        String createFormNamespace = nsData.getNamespace() + FormProcessor.NAMESPACE_SEPARATOR
                + nsData.getForm().getId() + FormProcessor.NAMESPACE_SEPARATOR + parentFieldName;
        // Flag the create form so that setValues() performs the actual item addition.
        getFormProcessor().setAttribute(createForm, createFormNamespace, FormStatusData.DO_THE_ITEM_ADD, Boolean.TRUE);
        getFormProcessor().setValues(nsData.getForm(), nsData.getNamespace(), request.getRequestObject().getParameterMap(), request.getFilesByParamName());
        FormStatusData createStatus = getFormProcessor().read(createForm, createFormNamespace + createSuffix);
        if (createStatus.isValid()) {
            getFormProcessor().clearFieldErrors(nsData.getForm(), nsData.getNamespace());
            getFormProcessor().clear(createForm, createFormNamespace + createSuffix);
        } else {
            // Re-mark each invalid field on the create form after resetting the parent's errors.
            List wrongFields = createStatus.getWrongFields();
            getFormProcessor().clearFieldErrors(nsData.getForm(), nsData.getNamespace());
            for (Iterator it = wrongFields.iterator(); it.hasNext();) {
                getFormProcessor().forceWrongField(createForm, createFormNamespace + createSuffix, (String) it.next());
            }
        }
        // Always reset the flag so later setValues() calls do not re-trigger the add.
        getFormProcessor().setAttribute(createForm, createFormNamespace, FormStatusData.DO_THE_ITEM_ADD, Boolean.FALSE);
        log.debug("Item added to subform");
    }
}
/**
 * Deletes an item from a subform. The child uid is resolved from the
 * "child_uid_value" request parameters (the last non-empty value wins) and the
 * index/form/field parameters are looked up relative to that uid. The removed
 * position is remembered in the helper, and field errors on the parent form are
 * cleared afterwards.
 */
public void actionDeleteItem(CommandRequest request) throws Exception {
    String uid = "";
    String[] uidCandidates = request.getRequestObject().getParameterValues("child_uid_value");
    if (uidCandidates != null) {
        for (String candidate : uidCandidates) {
            if (candidate != null && !"".equals(candidate)) uid = candidate;
        }
    }
    String indexParam = request.getParameter(uid + "_index");
    String parentFormId = request.getParameter(uid + "_parentFormId");
    String parentNamespace = request.getParameter(uid + "_parentNamespace");
    String fieldName = request.getParameter(uid + "_field");
    String inputName = request.getParameter(uid + "_inputName");
    Form parentForm = subformFinderService.getFormById(Long.decode(parentFormId), parentNamespace);
    getFormProcessor().setValues(parentForm, parentNamespace, request.getRequestObject().getParameterMap(), request.getFilesByParamName());
    Field field = parentForm.getField(fieldName);
    FieldHandler handler = getFieldHandlersManager().getHandler(field.getFieldType());
    if (handler instanceof CreateDynamicObjectFieldHandler) {
        int position = Integer.decode(indexParam).intValue();
        Object newFieldValue = ((CreateDynamicObjectFieldHandler) handler).deleteElementInPosition(parentForm, parentNamespace, fieldName, position);
        // Remember which positions were removed for this input.
        List<Integer> removedPositions = helper.getRemovedFieldPositions(inputName);
        if (removedPositions == null) {
            removedPositions = new ArrayList<Integer>();
            helper.setRemovedFieldPositions(inputName, removedPositions);
        }
        removedPositions.add(position);
        getFormProcessor().modify(parentForm, parentNamespace, fieldName, newFieldValue);
    } else {
        log.error("Cannot delete value in a field which is not a CreateDynamicObjectFieldHandler.");
    }
    getFormProcessor().clearFieldErrors(parentForm, parentNamespace);
}
/** Action entry point: puts the requested subform item into edit mode (see {@link #editItem}). */
public void actionEditItem(CommandRequest request) throws Exception {
    editItem(request, true);
}
/**
 * Persists the values of subform items being edited: every request namespace
 * flagged with the "&saveEdited" suffix gets the submitted values written back
 * through the form processor.
 */
public void actionSaveEditedItem(CommandRequest request) {
    log.debug("Saving edited item in subform");
    for (Object o : getFormNamespaceDatas(request, FormProcessor.CUSTOM_NAMESPACE_SEPARATOR + "saveEdited")) {
        FormNamespaceData nsData = (FormNamespaceData) o;
        getFormProcessor().setValues(nsData.getForm(), nsData.getNamespace(), request.getRequestObject().getParameterMap(), request.getFilesByParamName(), true);
    }
    log.debug("Item from subform, saved");
}
/** Action entry point: cancels an ongoing subform item edit and restores the previous values (see {@link #editItem}). */
public void actionCancelEditItem(CommandRequest request) throws Exception {
    editItem(request, false);
}
/** Action entry point: marks the requested subform item as previewed (see {@link #previewItem}). */
public void actionPreviewItem(CommandRequest request) throws Exception {
    previewItem(request, true);
}
/** Action entry point: clears the preview state of the requested subform item (see {@link #previewItem}). */
public void actionCancelPreviewItem(CommandRequest request) throws Exception {
    previewItem(request, false);
}
/**
 * Toggles preview mode for a subform item. When {@code doIt} is true the item at the
 * requested index is marked as previewed in the helper; otherwise any preview position
 * for the input is cleared. Field errors on the parent form are reset either way.
 */
public void previewItem(CommandRequest request, boolean doIt) throws Exception {
    // Resolve the child uid: the last non-empty "child_uid_value" parameter wins.
    String uid = "";
    String[] uidCandidates = request.getRequestObject().getParameterValues("child_uid_value");
    if (uidCandidates != null) {
        for (String candidate : uidCandidates) {
            if (candidate != null && !"".equals(candidate)) uid = candidate;
        }
    }
    String indexParam = request.getParameter(uid + "_index");
    String parentFormId = request.getParameter(uid + "_parentFormId");
    String parentNamespace = request.getParameter(uid + "_parentNamespace");
    String inputName = request.getParameter(uid + "_inputName");
    Form parentForm = subformFinderService.getFormById(Long.decode(parentFormId), parentNamespace);
    getFormProcessor().setValues(parentForm, parentNamespace, request.getRequestObject().getParameterMap(), request.getFilesByParamName());
    if (doIt) {
        helper.setPreviewFieldPosition(inputName, Integer.decode(indexParam));
    } else {
        helper.clearPreviewFieldPositions(inputName);
    }
    getFormProcessor().clearFieldErrors(parentForm, parentNamespace);
}
/**
 * Puts a subform item into edit mode ({@code doIt == true}) or cancels an ongoing edit
 * ({@code doIt == false}).
 * <p>
 * On entering edit mode, a deep clone of the field's current value is stashed in the
 * helper so that a later cancel can restore it; the edit form's status is cleared for
 * both the indexed and the un-indexed namespace; and the edit position is recorded.
 * On cancel, the stashed previous values are written back and all edit/preview state
 * for the input is cleared. Field errors on the parent form are reset in both paths.
 *
 * @param request the current command request carrying the child uid and edit parameters
 * @param doIt    true to start editing, false to cancel and restore previous values
 */
public void editItem(CommandRequest request, boolean doIt) throws Exception {
    // Resolve the child uid: the last non-empty "child_uid_value" parameter wins.
    String[] uids = request.getRequestObject().getParameterValues("child_uid_value");
    String uid = "";
    if (uids != null) {
        for (int i = 0; i < uids.length; i++) {
            if (uids[i] != null && !"".equals(uids[i])) uid = uids[i];
        }
    }
    // The remaining parameters are namespaced by the resolved uid.
    String index = request.getParameter(uid + "_index");
    String parentFormId = request.getParameter(uid + "_parentFormId");
    String parentNamespace = request.getParameter(uid + "_parentNamespace");
    String fieldName = request.getParameter(uid + "_field");
    String inputName = request.getParameter(uid + "_inputName");
    Form form = subformFinderService.getFormById(Long.decode(parentFormId), parentNamespace);
    Field field = form.getField(fieldName);
    getFormProcessor().setValues(form, parentNamespace, request.getRequestObject().getParameterMap(), request.getFilesByParamName());
    if (doIt) {
        // Snapshot the current value so a later cancel can restore it.
        FormStatusData fsd = getFormProcessor().read(form, parentNamespace);
        Map[] previousValue = deepCloneOfMapArray((Map[]) fsd.getCurrentValue(fieldName), new HashMap());
        helper.setEditFieldPreviousValues(inputName, previousValue);
        CreateDynamicObjectFieldHandler fieldHandler = (CreateDynamicObjectFieldHandler) getFieldHandlersManager().getHandler(field.getFieldType());
        Form formToEdit = fieldHandler.getEditForm(field, parentNamespace);
        // Clear any stale status of the edit form, both for the indexed namespace and the bare one.
        getFormProcessor().clear(formToEdit, parentNamespace + FormProcessor.NAMESPACE_SEPARATOR + parentFormId + FormProcessor.NAMESPACE_SEPARATOR + fieldName + FormProcessor.CUSTOM_NAMESPACE_SEPARATOR + index);
        getFormProcessor().clear(formToEdit, parentNamespace + FormProcessor.NAMESPACE_SEPARATOR + parentFormId + FormProcessor.NAMESPACE_SEPARATOR + fieldName);
        helper.setEditFieldPosition(inputName, Integer.decode(index));
    } else {
        // Cancel: restore the stashed values and drop all edit/preview state for this input.
        Object previousValue = helper.getEditFieldPreviousValues(inputName);
        getFormProcessor().modify(form, parentNamespace, fieldName, previousValue);
        helper.clearExpandedField(inputName);
        helper.clearEditFieldPositions(inputName);
        helper.clearEditFieldPreviousValues(inputName);
        helper.clearPreviewFieldPositions(inputName);
    }
    getFormProcessor().clearFieldErrors(form, parentNamespace);
}
/**
 * Returns a deep clone of the given array of maps. Each element is cloned via
 * {@link #deepCloneOfMap(Map, Map)}, threading the shared {@code cache} through.
 * A null or empty array is returned as-is (same reference).
 *
 * @param maparray the array to clone; may be null or empty
 * @param cache    shared cache handed down to {@code deepCloneOfMap}
 * @return a new array with deep-cloned elements, or the original reference when
 *         it is null or empty
 */
protected Map[] deepCloneOfMapArray(Map[] maparray, Map cache) {
    if (maparray == null || maparray.length == 0) return maparray;
    // Preallocate the result instead of growing it one element at a time with
    // ArrayUtils.add, which copied the entire array on every iteration (O(n^2)).
    Map[] clone = new Map[maparray.length];
    for (int i = 0; i < maparray.length; i++) {
        clone[i] = deepCloneOfMap(maparray[i], cache);
    }
    return clone;
}
/**
 * Returns a deep clone of the given map: nested {@code Map} values are cloned
 * recursively, {@code Map[]} values are cloned via
 * {@link #deepCloneOfMapArray(Map[], Map)}, and all other values (including null)
 * are shared by reference. {@code I18nSet} instances are treated as leaves and
 * returned unchanged.
 *
 * @param map   the map to clone; may be null
 * @param cache shared cache threaded through the recursive clone calls
 * @return a new {@code HashMap} clone, the same {@code I18nSet} instance, or null
 */
protected Map deepCloneOfMap(Map map, Map cache) {
    if (map == null) return null;
    // I18nSet instances are shared, not cloned.
    if (map instanceof I18nSet) return map;
    Map clone = new HashMap();
    // Iterate entries instead of keySet()+get(key) to avoid a second lookup per key.
    for (Iterator iterator = map.entrySet().iterator(); iterator.hasNext();) {
        Map.Entry entry = (Map.Entry) iterator.next();
        Object key = entry.getKey();
        Object value = entry.getValue();
        if (value instanceof Map) {
            clone.put(key, deepCloneOfMap((Map) value, cache));
        } else if (value instanceof Map[]) {
            clone.put(key, deepCloneOfMapArray((Map[]) value, cache));
        } else {
            // Covers null and all non-map values, which are copied by reference.
            clone.put(key, value);
        }
    }
    return clone;
}
/**
 * Collects the {@code FormNamespaceData} of every request parameter whose name ends
 * with the given action suffix and whose submitted value is the string "true".
 *
 * @param request the current command request
 * @param action  the action suffix to match against parameter names
 * @return a set of matching namespace data objects; never null, possibly empty
 */
protected Set getFormNamespaceDatas(CommandRequest request, String action) {
    Set result = new HashSet();
    Enumeration paramNames = request.getRequestObject().getParameterNames();
    while (paramNames.hasMoreElements()) {
        String paramName = (String) paramNames.nextElement();
        String paramValue = request.getParameter(paramName);
        if (!"true".equals(paramValue) || !paramName.endsWith(action)) continue;
        FormNamespaceData nsData = getNamespaceManager().getNamespace(paramName);
        // Only the directly matching namespace is collected; parent namespaces
        // are intentionally not added.
        if (nsData != null) result.add(nsData);
    }
    return result;
}
/** This component always participates in action handling. */
@Override
public boolean isEnabledForActionHandling() {
    return true;
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.server.remotetask;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableMultimap;
import com.google.common.collect.ImmutableSet;
import com.google.inject.Binder;
import com.google.inject.Injector;
import com.google.inject.Module;
import com.google.inject.Provides;
import io.airlift.bootstrap.Bootstrap;
import io.airlift.http.client.testing.TestingHttpClient;
import io.airlift.jaxrs.JsonMapper;
import io.airlift.jaxrs.testing.JaxrsTestingHttpProcessor;
import io.airlift.json.JsonCodec;
import io.airlift.json.JsonModule;
import io.airlift.units.Duration;
import io.trino.block.BlockJsonSerde;
import io.trino.client.NodeVersion;
import io.trino.connector.CatalogName;
import io.trino.execution.DynamicFilterConfig;
import io.trino.execution.DynamicFiltersCollector.VersionedDynamicFilterDomains;
import io.trino.execution.Lifespan;
import io.trino.execution.NodeTaskMap;
import io.trino.execution.QueryManagerConfig;
import io.trino.execution.RemoteTask;
import io.trino.execution.StageId;
import io.trino.execution.TaskId;
import io.trino.execution.TaskInfo;
import io.trino.execution.TaskManagerConfig;
import io.trino.execution.TaskSource;
import io.trino.execution.TaskState;
import io.trino.execution.TaskStatus;
import io.trino.execution.TaskTestUtils;
import io.trino.execution.TestSqlTaskManager;
import io.trino.execution.buffer.OutputBuffers;
import io.trino.metadata.HandleJsonModule;
import io.trino.metadata.HandleResolver;
import io.trino.metadata.InternalNode;
import io.trino.metadata.Metadata;
import io.trino.metadata.Split;
import io.trino.server.DynamicFilterService;
import io.trino.server.HttpRemoteTaskFactory;
import io.trino.server.TaskUpdateRequest;
import io.trino.spi.ErrorCode;
import io.trino.spi.QueryId;
import io.trino.spi.block.Block;
import io.trino.spi.block.BlockEncodingSerde;
import io.trino.spi.connector.ColumnHandle;
import io.trino.spi.connector.DynamicFilter;
import io.trino.spi.connector.TestingColumnHandle;
import io.trino.spi.predicate.Domain;
import io.trino.spi.predicate.TupleDomain;
import io.trino.spi.type.Type;
import io.trino.spi.type.TypeOperators;
import io.trino.sql.DynamicFilters;
import io.trino.sql.planner.Symbol;
import io.trino.sql.planner.SymbolAllocator;
import io.trino.sql.planner.plan.DynamicFilterId;
import io.trino.sql.planner.plan.PlanNodeId;
import io.trino.sql.tree.SymbolReference;
import io.trino.testing.TestingHandleResolver;
import io.trino.testing.TestingSplit;
import io.trino.type.TypeDeserializer;
import org.testng.annotations.Test;
import javax.inject.Singleton;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.HeaderParam;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.UriInfo;
import java.net.URI;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.BooleanSupplier;
import static com.google.common.collect.Iterables.getOnlyElement;
import static com.google.common.util.concurrent.MoreExecutors.newDirectExecutorService;
import static io.airlift.json.JsonBinder.jsonBinder;
import static io.airlift.json.JsonCodecBinder.jsonCodecBinder;
import static io.airlift.testing.Assertions.assertGreaterThanOrEqual;
import static io.trino.SessionTestUtils.TEST_SESSION;
import static io.trino.execution.DynamicFiltersCollector.INITIAL_DYNAMIC_FILTERS_VERSION;
import static io.trino.execution.TaskTestUtils.TABLE_SCAN_NODE_ID;
import static io.trino.execution.buffer.OutputBuffers.createInitialEmptyOutputBuffers;
import static io.trino.metadata.MetadataManager.createTestMetadataManager;
import static io.trino.server.InternalHeaders.TRINO_CURRENT_VERSION;
import static io.trino.server.InternalHeaders.TRINO_MAX_WAIT;
import static io.trino.spi.StandardErrorCode.REMOTE_TASK_ERROR;
import static io.trino.spi.StandardErrorCode.REMOTE_TASK_MISMATCH;
import static io.trino.spi.type.BigintType.BIGINT;
import static io.trino.testing.assertions.Assert.assertEquals;
import static io.trino.testing.assertions.Assert.assertEventually;
import static java.lang.Math.min;
import static java.lang.String.format;
import static java.util.Objects.requireNonNull;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.NANOSECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.testng.Assert.assertTrue;
public class TestHttpRemoteTask
{
// This 30 sec per-test timeout should never be reached because the test should fail and do proper cleanup after 20 sec.
// Interval between condition re-checks in poll().
private static final Duration POLL_TIMEOUT = new Duration(100, MILLISECONDS);
// How long HTTP activity must stay quiet before waitUntilIdle() returns.
private static final Duration IDLE_TIMEOUT = new Duration(3, SECONDS);
// Hard cap: poll()/waitUntilIdle() throw AssertionError once this elapses.
private static final Duration FAIL_TIMEOUT = new Duration(20, SECONDS);
private static final TaskManagerConfig TASK_MANAGER_CONFIG = new TaskManagerConfig()
        // Shorten status refresh wait and info update interval so that we can have a shorter test timeout
        .setStatusRefreshMaxWait(new Duration(IDLE_TIMEOUT.roundTo(MILLISECONDS) / 100, MILLISECONDS))
        .setInfoUpdateInterval(new Duration(IDLE_TIMEOUT.roundTo(MILLISECONDS) / 10, MILLISECONDS));
// When true, the in-process HTTP processor traces each request/response.
private static final boolean TRACE_HTTP = false;
@Test(timeOut = 30000)
public void testRemoteTaskMismatch()
        throws Exception
{
    // The fake worker swaps the task instance id; the remote task must fail with REMOTE_TASK_MISMATCH (asserted in runTest).
    runTest(FailureScenario.TASK_MISMATCH);
}
@Test(timeOut = 30000)
public void testRejectedExecutionWhenVersionIsHigh()
        throws Exception
{
    // NOTE(review): despite the method name, this runs the TASK_MISMATCH_WHEN_VERSION_IS_HIGH
    // scenario, which runTest asserts produces REMOTE_TASK_MISMATCH — confirm the name is intentional.
    runTest(FailureScenario.TASK_MISMATCH_WHEN_VERSION_IS_HIGH);
}
@Test(timeOut = 30000)
public void testRejectedExecution()
        throws Exception
{
    // The testing http client rejects requests; the remote task must fail with REMOTE_TASK_ERROR (asserted in runTest).
    runTest(FailureScenario.REJECTED_EXECUTION);
}
@Test(timeOut = 30000)
public void testRegular()
        throws Exception
{
    // Happy-path lifecycle: create a remote task, feed it a split, close the split
    // stream, cancel, and verify both status and info converge to a done state.
    AtomicLong lastActivityNanos = new AtomicLong(System.nanoTime());
    TestingTaskResource testingTaskResource = new TestingTaskResource(lastActivityNanos, FailureScenario.NO_FAILURE);
    HttpRemoteTaskFactory httpRemoteTaskFactory = createHttpRemoteTaskFactory(testingTaskResource);
    RemoteTask remoteTask = createRemoteTask(httpRemoteTaskFactory, ImmutableSet.of());
    testingTaskResource.setInitialTaskInfo(remoteTask.getTaskInfo());
    remoteTask.start();
    Lifespan lifespan = Lifespan.driverGroup(3);
    remoteTask.addSplits(ImmutableMultimap.of(TABLE_SCAN_NODE_ID, new Split(new CatalogName("test"), TestingSplit.createLocalSplit(), lifespan)));
    // Wait until the fake worker has received the split.
    poll(() -> testingTaskResource.getTaskSource(TABLE_SCAN_NODE_ID) != null);
    poll(() -> testingTaskResource.getTaskSource(TABLE_SCAN_NODE_ID).getSplits().size() == 1);
    remoteTask.noMoreSplits(TABLE_SCAN_NODE_ID, lifespan);
    poll(() -> testingTaskResource.getTaskSource(TABLE_SCAN_NODE_ID).getNoMoreSplitsForLifespan().size() == 1);
    remoteTask.noMoreSplits(TABLE_SCAN_NODE_ID);
    poll(() -> testingTaskResource.getTaskSource(TABLE_SCAN_NODE_ID).isNoMoreSplits());
    remoteTask.cancel();
    poll(() -> remoteTask.getTaskStatus().getState().isDone());
    poll(() -> remoteTask.getTaskInfo().getTaskStatus().getState().isDone());
    httpRemoteTaskFactory.stop();
}
@Test(timeOut = 30000)
public void testDynamicFilters()
        throws Exception
{
    // Verifies how the coordinator-side remote task fetches dynamic filter domains
    // from the worker: once on start, and again after the worker publishes a new
    // version — but not on every status refresh (see the fetch-counter assertions).
    DynamicFilterId filterId1 = new DynamicFilterId("df1");
    DynamicFilterId filterId2 = new DynamicFilterId("df2");
    SymbolAllocator symbolAllocator = new SymbolAllocator();
    Symbol symbol1 = symbolAllocator.newSymbol("DF_SYMBOL1", BIGINT);
    Symbol symbol2 = symbolAllocator.newSymbol("DF_SYMBOL2", BIGINT);
    SymbolReference df1 = symbol1.toSymbolReference();
    SymbolReference df2 = symbol2.toSymbolReference();
    ColumnHandle handle1 = new TestingColumnHandle("column1");
    ColumnHandle handle2 = new TestingColumnHandle("column2");
    QueryId queryId = new QueryId("test");
    TestingTaskResource testingTaskResource = new TestingTaskResource(new AtomicLong(System.nanoTime()), FailureScenario.NO_FAILURE);
    DynamicFilterService dynamicFilterService = new DynamicFilterService(createTestMetadataManager(), new TypeOperators(), newDirectExecutorService());
    HttpRemoteTaskFactory httpRemoteTaskFactory = createHttpRemoteTaskFactory(testingTaskResource, dynamicFilterService);
    RemoteTask remoteTask = createRemoteTask(httpRemoteTaskFactory, ImmutableSet.of());
    Map<DynamicFilterId, Domain> initialDomain = ImmutableMap.of(
            filterId1,
            Domain.singleValue(BIGINT, 1L));
    testingTaskResource.setInitialTaskInfo(remoteTask.getTaskInfo());
    testingTaskResource.setDynamicFilterDomains(new VersionedDynamicFilterDomains(1L, initialDomain));
    dynamicFilterService.registerQuery(
            queryId,
            TEST_SESSION,
            ImmutableSet.of(filterId1, filterId2),
            ImmutableSet.of(filterId1, filterId2),
            ImmutableSet.of());
    dynamicFilterService.stageCannotScheduleMoreTasks(new StageId(queryId, 1), 1);
    DynamicFilter dynamicFilter = dynamicFilterService.createDynamicFilter(
            queryId,
            ImmutableList.of(
                    new DynamicFilters.Descriptor(filterId1, df1),
                    new DynamicFilters.Descriptor(filterId2, df2)),
            ImmutableMap.of(
                    symbol1, handle1,
                    symbol2, handle2),
            symbolAllocator.getTypes());
    // make sure initial dynamic filters are collected
    CompletableFuture<?> future = dynamicFilter.isBlocked();
    remoteTask.start();
    future.get();
    assertEquals(
            dynamicFilter.getCurrentPredicate(),
            TupleDomain.withColumnDomains(ImmutableMap.of(
                    handle1, Domain.singleValue(BIGINT, 1L))));
    assertEquals(testingTaskResource.getDynamicFiltersFetchCounter(), 1);
    // make sure dynamic filters are not collected for every status update
    assertEventually(
            new Duration(15, SECONDS),
            () -> assertGreaterThanOrEqual(testingTaskResource.getStatusFetchCounter(), 3L));
    // Publish a new dynamic filter version on the worker; expect exactly one more fetch.
    future = dynamicFilter.isBlocked();
    testingTaskResource.setDynamicFilterDomains(new VersionedDynamicFilterDomains(
            2L,
            ImmutableMap.of(filterId2, Domain.singleValue(BIGINT, 2L))));
    future.get();
    assertEquals(
            dynamicFilter.getCurrentPredicate(),
            TupleDomain.withColumnDomains(ImmutableMap.of(
                    handle1, Domain.singleValue(BIGINT, 1L),
                    handle2, Domain.singleValue(BIGINT, 2L))));
    assertEquals(testingTaskResource.getDynamicFiltersFetchCounter(), 2L);
    assertGreaterThanOrEqual(testingTaskResource.getStatusFetchCounter(), 4L);
    httpRemoteTaskFactory.stop();
    dynamicFilterService.stop();
}
@Test(timeOut = 30_000)
public void testOutboundDynamicFilters()
        throws Exception
{
    // Verifies coordinator-to-worker dynamic filter delivery: a filter collected
    // before the remote task exists is sent once on start, incremental updates are
    // sent without requiring new splits, and already-sent domains are not repeated.
    DynamicFilterId filterId1 = new DynamicFilterId("df1");
    DynamicFilterId filterId2 = new DynamicFilterId("df2");
    SymbolAllocator symbolAllocator = new SymbolAllocator();
    Symbol symbol1 = symbolAllocator.newSymbol("DF_SYMBOL1", BIGINT);
    Symbol symbol2 = symbolAllocator.newSymbol("DF_SYMBOL2", BIGINT);
    SymbolReference df1 = symbol1.toSymbolReference();
    SymbolReference df2 = symbol2.toSymbolReference();
    ColumnHandle handle1 = new TestingColumnHandle("column1");
    ColumnHandle handle2 = new TestingColumnHandle("column2");
    QueryId queryId = new QueryId("test");
    TestingTaskResource testingTaskResource = new TestingTaskResource(new AtomicLong(System.nanoTime()), FailureScenario.NO_FAILURE);
    DynamicFilterService dynamicFilterService = new DynamicFilterService(createTestMetadataManager(), new TypeOperators(), newDirectExecutorService());
    dynamicFilterService.registerQuery(
            queryId,
            TEST_SESSION,
            ImmutableSet.of(filterId1, filterId2),
            ImmutableSet.of(filterId1, filterId2),
            ImmutableSet.of());
    dynamicFilterService.stageCannotScheduleMoreTasks(new StageId(queryId, 1), 1);
    DynamicFilter dynamicFilter = dynamicFilterService.createDynamicFilter(
            queryId,
            ImmutableList.of(
                    new DynamicFilters.Descriptor(filterId1, df1),
                    new DynamicFilters.Descriptor(filterId2, df2)),
            ImmutableMap.of(
                    symbol1, handle1,
                    symbol2, handle2),
            symbolAllocator.getTypes());
    // make sure initial dynamic filter is collected
    CompletableFuture<?> future = dynamicFilter.isBlocked();
    dynamicFilterService.addTaskDynamicFilters(
            new TaskId(queryId.getId(), 1, 1),
            ImmutableMap.of(filterId1, Domain.singleValue(BIGINT, 1L)));
    future.get();
    assertEquals(
            dynamicFilter.getCurrentPredicate(),
            TupleDomain.withColumnDomains(ImmutableMap.of(
                    handle1, Domain.singleValue(BIGINT, 1L))));
    // Create remote task after dynamic filter is created to simulate new nodes joining
    HttpRemoteTaskFactory httpRemoteTaskFactory = createHttpRemoteTaskFactory(testingTaskResource, dynamicFilterService);
    RemoteTask remoteTask = createRemoteTask(httpRemoteTaskFactory, ImmutableSet.of(filterId1, filterId2));
    testingTaskResource.setInitialTaskInfo(remoteTask.getTaskInfo());
    remoteTask.start();
    assertEventually(
            new Duration(10, SECONDS),
            () -> assertEquals(testingTaskResource.getDynamicFiltersSentCounter(), 1L));
    assertEquals(testingTaskResource.getCreateOrUpdateCounter(), 1L);
    // schedule a couple of splits to trigger task updates
    addSplit(remoteTask, testingTaskResource, 1);
    addSplit(remoteTask, testingTaskResource, 2);
    // make sure dynamic filter was sent in task updates only once
    assertEquals(testingTaskResource.getDynamicFiltersSentCounter(), 1L);
    assertEquals(testingTaskResource.getCreateOrUpdateCounter(), 3L);
    assertEquals(
            testingTaskResource.getLatestDynamicFilterFromCoordinator(),
            ImmutableMap.of(filterId1, Domain.singleValue(BIGINT, 1L)));
    future = dynamicFilter.isBlocked();
    dynamicFilterService.addTaskDynamicFilters(
            new TaskId(queryId.getId(), 1, 1),
            ImmutableMap.of(filterId2, Domain.singleValue(BIGINT, 2L)));
    future.get();
    assertEquals(
            dynamicFilter.getCurrentPredicate(),
            TupleDomain.withColumnDomains(ImmutableMap.of(
                    handle1, Domain.singleValue(BIGINT, 1L),
                    handle2, Domain.singleValue(BIGINT, 2L))));
    // dynamic filter should be sent even though there were no further splits scheduled
    assertEventually(
            new Duration(10, SECONDS),
            () -> assertEquals(testingTaskResource.getDynamicFiltersSentCounter(), 2L));
    assertEquals(testingTaskResource.getCreateOrUpdateCounter(), 4L);
    // previously sent dynamic filter should not be repeated
    assertEquals(
            testingTaskResource.getLatestDynamicFilterFromCoordinator(),
            ImmutableMap.of(filterId2, Domain.singleValue(BIGINT, 2L)));
    httpRemoteTaskFactory.stop();
    dynamicFilterService.stop();
}
/**
 * Drives a remote task through the given failure scenario until HTTP activity
 * stops, then asserts the task ended in a done state with the expected error code.
 */
private void runTest(FailureScenario failureScenario)
        throws Exception
{
    AtomicLong lastActivityNanos = new AtomicLong(System.nanoTime());
    TestingTaskResource testingTaskResource = new TestingTaskResource(lastActivityNanos, failureScenario);
    HttpRemoteTaskFactory httpRemoteTaskFactory = createHttpRemoteTaskFactory(testingTaskResource);
    RemoteTask remoteTask = createRemoteTask(httpRemoteTaskFactory, ImmutableSet.of());
    testingTaskResource.setInitialTaskInfo(remoteTask.getTaskInfo());
    remoteTask.start();
    waitUntilIdle(lastActivityNanos);
    httpRemoteTaskFactory.stop();
    assertTrue(remoteTask.getTaskStatus().getState().isDone(), format("TaskStatus is not in a done state: %s", remoteTask.getTaskStatus()));
    ErrorCode actualErrorCode = getOnlyElement(remoteTask.getTaskStatus().getFailures()).getErrorCode();
    switch (failureScenario) {
        case TASK_MISMATCH:
        case TASK_MISMATCH_WHEN_VERSION_IS_HIGH:
            assertTrue(remoteTask.getTaskInfo().getTaskStatus().getState().isDone(), format("TaskInfo is not in a done state: %s", remoteTask.getTaskInfo()));
            assertEquals(actualErrorCode, REMOTE_TASK_MISMATCH.toErrorCode());
            break;
        case REJECTED_EXECUTION:
            // for a rejection to occur, the http client must be shut down, which means we will not be able to get the final task info
            assertEquals(actualErrorCode, REMOTE_TASK_ERROR.toErrorCode());
            break;
        default:
            throw new UnsupportedOperationException();
    }
}
/**
 * Schedules one more split on the remote task and blocks until the fake worker
 * reports the expected total number of splits for the table scan node.
 */
private void addSplit(RemoteTask remoteTask, TestingTaskResource testingTaskResource, int expectedSplitsCount)
        throws InterruptedException
{
    Lifespan lifespan = Lifespan.driverGroup(3);
    remoteTask.addSplits(ImmutableMultimap.of(TABLE_SCAN_NODE_ID, new Split(new CatalogName("test"), TestingSplit.createLocalSplit(), lifespan)));
    // wait for splits to be received by remote task
    poll(() -> testingTaskResource.getTaskSource(TABLE_SCAN_NODE_ID) != null);
    poll(() -> testingTaskResource.getTaskSource(TABLE_SCAN_NODE_ID).getSplits().size() == expectedSplitsCount);
}
/**
 * Creates a remote task targeting the fake worker URI served in-process by
 * {@code TestingTaskResource}.
 */
private RemoteTask createRemoteTask(HttpRemoteTaskFactory httpRemoteTaskFactory, Set<DynamicFilterId> outboundDynamicFilterIds)
{
    return httpRemoteTaskFactory.createRemoteTask(
            TEST_SESSION,
            new TaskId("test", 1, 2),
            new InternalNode("node-id", URI.create("http://fake.invalid/"), new NodeVersion("version"), false),
            TaskTestUtils.PLAN_FRAGMENT,
            ImmutableMultimap.of(),
            createInitialEmptyOutputBuffers(OutputBuffers.BufferType.BROADCAST),
            new NodeTaskMap.PartitionedSplitCountTracker(i -> {}),
            outboundDynamicFilterIds,
            true);
}
/** Convenience overload: builds the factory with a fresh, default-configured DynamicFilterService. */
private static HttpRemoteTaskFactory createHttpRemoteTaskFactory(TestingTaskResource testingTaskResource)
{
    return createHttpRemoteTaskFactory(testingTaskResource, new DynamicFilterService(createTestMetadataManager(), new TypeOperators(), new DynamicFilterConfig()));
}
/**
 * Builds an {@code HttpRemoteTaskFactory} whose HTTP calls are served in-process:
 * a {@code TestingHttpClient} routes every request through a JAX-RS testing
 * processor straight to the given {@code TestingTaskResource}.
 */
private static HttpRemoteTaskFactory createHttpRemoteTaskFactory(TestingTaskResource testingTaskResource, DynamicFilterService dynamicFilterService)
{
    Bootstrap app = new Bootstrap(
            new JsonModule(),
            new HandleJsonModule(),
            new Module()
            {
                @Override
                public void configure(Binder binder)
                {
                    // JSON codecs for the coordinator/worker task wire protocol.
                    binder.bind(JsonMapper.class);
                    binder.bind(Metadata.class).toInstance(createTestMetadataManager());
                    jsonBinder(binder).addDeserializerBinding(Type.class).to(TypeDeserializer.class);
                    jsonCodecBinder(binder).bindJsonCodec(TaskStatus.class);
                    jsonCodecBinder(binder).bindJsonCodec(VersionedDynamicFilterDomains.class);
                    jsonBinder(binder).addSerializerBinding(Block.class).to(BlockJsonSerde.Serializer.class);
                    jsonBinder(binder).addDeserializerBinding(Block.class).to(BlockJsonSerde.Deserializer.class);
                    jsonCodecBinder(binder).bindJsonCodec(TaskInfo.class);
                    jsonCodecBinder(binder).bindJsonCodec(TaskUpdateRequest.class);
                }
                @Provides
                @Singleton
                public BlockEncodingSerde createBlockEncodingSerde(Metadata metadata)
                {
                    return metadata.getBlockEncodingSerde();
                }
                @Provides
                private HttpRemoteTaskFactory createHttpRemoteTaskFactory(
                        JsonMapper jsonMapper,
                        JsonCodec<TaskStatus> taskStatusCodec,
                        JsonCodec<VersionedDynamicFilterDomains> dynamicFilterDomainsCodec,
                        JsonCodec<TaskInfo> taskInfoCodec,
                        JsonCodec<TaskUpdateRequest> taskUpdateRequestCodec)
                {
                    // Serve all HTTP requests in-process via the JAX-RS testing processor.
                    JaxrsTestingHttpProcessor jaxrsTestingHttpProcessor = new JaxrsTestingHttpProcessor(URI.create("http://fake.invalid/"), testingTaskResource, jsonMapper);
                    TestingHttpClient testingHttpClient = new TestingHttpClient(jaxrsTestingHttpProcessor.setTrace(TRACE_HTTP));
                    // Hand the client back to the resource (needed by the REJECTED_EXECUTION scenario).
                    testingTaskResource.setHttpClient(testingHttpClient);
                    return new HttpRemoteTaskFactory(
                            new QueryManagerConfig(),
                            TASK_MANAGER_CONFIG,
                            testingHttpClient,
                            new TestSqlTaskManager.MockLocationFactory(),
                            taskStatusCodec,
                            dynamicFilterDomainsCodec,
                            taskInfoCodec,
                            taskUpdateRequestCodec,
                            new RemoteTaskStats(),
                            dynamicFilterService);
                }
            });
    Injector injector = app
            .doNotInitializeLogging()
            .quiet()
            .initialize();
    HandleResolver handleResolver = injector.getInstance(HandleResolver.class);
    handleResolver.addCatalogHandleResolver("test", new TestingHandleResolver());
    return injector.getInstance(HttpRemoteTaskFactory.class);
}
/**
 * Repeatedly evaluates {@code success} until it returns true, sleeping at most
 * POLL_TIMEOUT between attempts. Throws AssertionError once FAIL_TIMEOUT has
 * elapsed without the condition becoming true.
 */
private static void poll(BooleanSupplier success)
        throws InterruptedException
{
    long deadlineNanos = System.nanoTime() + FAIL_TIMEOUT.roundTo(NANOSECONDS);
    for (boolean done = success.getAsBoolean(); !done; done = success.getAsBoolean()) {
        long remainingMillis = (deadlineNanos - System.nanoTime()) / 1_000_000;
        if (remainingMillis <= 0) {
            throw new AssertionError(format("Timeout of %s reached", FAIL_TIMEOUT));
        }
        // Sleep the poll interval, but never past the deadline.
        Thread.sleep(min(POLL_TIMEOUT.toMillis(), remainingMillis));
    }
}
/**
 * Blocks until no activity has been recorded in {@code lastActivityNanos} for
 * IDLE_TIMEOUT, or throws AssertionError if activity keeps occurring past
 * FAIL_TIMEOUT from the time this method was entered.
 */
private static void waitUntilIdle(AtomicLong lastActivityNanos)
        throws InterruptedException
{
    long startTimeNanos = System.nanoTime();
    while (true) {
        long millisSinceLastActivity = (System.nanoTime() - lastActivityNanos.get()) / 1_000_000L;
        long millisSinceStart = (System.nanoTime() - startTimeNanos) / 1_000_000L;
        long millisToIdleTarget = IDLE_TIMEOUT.toMillis() - millisSinceLastActivity;
        long millisToFailTarget = FAIL_TIMEOUT.toMillis() - millisSinceStart;
        // If the idle target can no longer be reached before the fail deadline, give up now.
        if (millisToFailTarget < millisToIdleTarget) {
            throw new AssertionError(format("Activity doesn't stop after %s", FAIL_TIMEOUT));
        }
        // Quiet long enough: done.
        if (millisToIdleTarget < 0) {
            return;
        }
        // Sleep just long enough to hit the idle target if no new activity arrives.
        Thread.sleep(millisToIdleTarget);
    }
}
/** Failure modes injected by {@code TestingTaskResource} into the simulated worker responses. */
private enum FailureScenario
{
    NO_FAILURE,
    TASK_MISMATCH,
    TASK_MISMATCH_WHEN_VERSION_IS_HIGH,
    REJECTED_EXECUTION,
}
@Path("/task/{nodeId}")
public static class TestingTaskResource
{
private static final String INITIAL_TASK_INSTANCE_ID = "task-instance-id";
// A different instance id, presumably returned in the TASK_MISMATCH scenarios — behavior defined past this view.
private static final String NEW_TASK_INSTANCE_ID = "task-instance-id-x";
// Bumped on every HTTP interaction; read by waitUntilIdle().
private final AtomicLong lastActivityNanos;
private final FailureScenario failureScenario;
private final AtomicReference<TestingHttpClient> httpClient = new AtomicReference<>();
private TaskInfo initialTaskInfo;
private TaskStatus initialTaskStatus;
// Worker-side dynamic filter domains to expose; set by the tests via setDynamicFilterDomains.
private Optional<VersionedDynamicFilterDomains> dynamicFilterDomains = Optional.empty();
private long version;
private TaskState taskState;
private String taskInstanceId = INITIAL_TASK_INSTANCE_ID;
// Most recent dynamic filter domains received from the coordinator in createOrUpdateTask.
private Map<DynamicFilterId, Domain> latestDynamicFilterFromCoordinator = ImmutableMap.of();
// Interaction counters asserted by the tests.
private long statusFetchCounter;
private long createOrUpdateCounter;
private long dynamicFiltersFetchCounter;
private long dynamicFiltersSentCounter;
/**
 * @param lastActivityNanos shared timestamp updated on every HTTP interaction
 * @param failureScenario   failure mode to inject into responses
 */
public TestingTaskResource(AtomicLong lastActivityNanos, FailureScenario failureScenario)
{
    this.lastActivityNanos = requireNonNull(lastActivityNanos, "lastActivityNanos is null");
    this.failureScenario = requireNonNull(failureScenario, "failureScenario is null");
}
public void setHttpClient(TestingHttpClient newValue)
{
httpClient.set(newValue);
}
@GET
@Path("{taskId}")
@Produces(MediaType.APPLICATION_JSON)
public synchronized TaskInfo getTaskInfo(
@PathParam("taskId") TaskId taskId,
@HeaderParam(TRINO_CURRENT_VERSION) Long currentVersion,
@HeaderParam(TRINO_MAX_WAIT) Duration maxWait,
@Context UriInfo uriInfo)
{
lastActivityNanos.set(System.nanoTime());
return buildTaskInfo();
}
Map<PlanNodeId, TaskSource> taskSourceMap = new HashMap<>();
@POST
@Path("{taskId}")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public synchronized TaskInfo createOrUpdateTask(
@PathParam("taskId") TaskId taskId,
TaskUpdateRequest taskUpdateRequest,
@Context UriInfo uriInfo)
{
for (TaskSource source : taskUpdateRequest.getSources()) {
taskSourceMap.compute(source.getPlanNodeId(), (planNodeId, taskSource) -> taskSource == null ? source : taskSource.update(source));
}
if (!taskUpdateRequest.getDynamicFilterDomains().isEmpty()) {
dynamicFiltersSentCounter++;
latestDynamicFilterFromCoordinator = taskUpdateRequest.getDynamicFilterDomains();
}
createOrUpdateCounter++;
lastActivityNanos.set(System.nanoTime());
return buildTaskInfo();
}
public synchronized TaskSource getTaskSource(PlanNodeId planNodeId)
{
TaskSource source = taskSourceMap.get(planNodeId);
if (source == null) {
return null;
}
return new TaskSource(source.getPlanNodeId(), source.getSplits(), source.getNoMoreSplitsForLifespan(), source.isNoMoreSplits());
}
@GET
@Path("{taskId}/status")
@Produces(MediaType.APPLICATION_JSON)
public synchronized TaskStatus getTaskStatus(
@PathParam("taskId") TaskId taskId,
@HeaderParam(TRINO_CURRENT_VERSION) Long currentVersion,
@HeaderParam(TRINO_MAX_WAIT) Duration maxWait,
@Context UriInfo uriInfo)
throws InterruptedException
{
lastActivityNanos.set(System.nanoTime());
wait(maxWait.roundTo(MILLISECONDS));
return buildTaskStatus();
}
@GET
@Path("{taskId}/dynamicfilters")
@Produces(MediaType.APPLICATION_JSON)
public synchronized VersionedDynamicFilterDomains acknowledgeAndGetNewDynamicFilterDomains(
@PathParam("taskId") TaskId taskId,
@HeaderParam(TRINO_CURRENT_VERSION) Long currentDynamicFiltersVersion,
@Context UriInfo uriInfo)
{
dynamicFiltersFetchCounter++;
return dynamicFilterDomains.orElse(null);
}
@DELETE
@Path("{taskId}")
@Produces(MediaType.APPLICATION_JSON)
public synchronized TaskInfo deleteTask(
@PathParam("taskId") TaskId taskId,
@QueryParam("abort") @DefaultValue("true") boolean abort,
@Context UriInfo uriInfo)
{
lastActivityNanos.set(System.nanoTime());
taskState = abort ? TaskState.ABORTED : TaskState.CANCELED;
return buildTaskInfo();
}
public void setInitialTaskInfo(TaskInfo initialTaskInfo)
{
this.initialTaskInfo = initialTaskInfo;
this.initialTaskStatus = initialTaskInfo.getTaskStatus();
this.taskState = initialTaskStatus.getState();
this.version = initialTaskStatus.getVersion();
switch (failureScenario) {
case TASK_MISMATCH_WHEN_VERSION_IS_HIGH:
// Make the initial version large enough.
// This way, the version number can't be reached if it is reset to 0.
version = 1_000_000;
break;
case TASK_MISMATCH:
case REJECTED_EXECUTION:
case NO_FAILURE:
break; // do nothing
default:
throw new UnsupportedOperationException();
}
}
public synchronized void setDynamicFilterDomains(VersionedDynamicFilterDomains dynamicFilterDomains)
{
this.dynamicFilterDomains = Optional.of(dynamicFilterDomains);
}
public Map<DynamicFilterId, Domain> getLatestDynamicFilterFromCoordinator()
{
return latestDynamicFilterFromCoordinator;
}
public synchronized long getStatusFetchCounter()
{
return statusFetchCounter;
}
public synchronized long getCreateOrUpdateCounter()
{
return createOrUpdateCounter;
}
public synchronized long getDynamicFiltersFetchCounter()
{
return dynamicFiltersFetchCounter;
}
public synchronized long getDynamicFiltersSentCounter()
{
return dynamicFiltersSentCounter;
}
private TaskInfo buildTaskInfo()
{
return new TaskInfo(
buildTaskStatus(),
initialTaskInfo.getLastHeartbeat(),
initialTaskInfo.getOutputBuffers(),
initialTaskInfo.getNoMoreSplits(),
initialTaskInfo.getStats(),
initialTaskInfo.isNeedsPlan());
}
private TaskStatus buildTaskStatus()
{
statusFetchCounter++;
// Change the task instance id after 10th fetch to simulate worker restart
switch (failureScenario) {
case TASK_MISMATCH:
case TASK_MISMATCH_WHEN_VERSION_IS_HIGH:
if (statusFetchCounter == 10) {
taskInstanceId = NEW_TASK_INSTANCE_ID;
version = 0;
}
break;
case REJECTED_EXECUTION:
if (statusFetchCounter >= 10) {
httpClient.get().close();
throw new RejectedExecutionException();
}
break;
case NO_FAILURE:
break;
default:
throw new UnsupportedOperationException();
}
return new TaskStatus(
initialTaskStatus.getTaskId(),
taskInstanceId,
++version,
taskState,
initialTaskStatus.getSelf(),
"fake",
ImmutableSet.of(),
initialTaskStatus.getFailures(),
initialTaskStatus.getQueuedPartitionedDrivers(),
initialTaskStatus.getRunningPartitionedDrivers(),
initialTaskStatus.isOutputBufferOverutilized(),
initialTaskStatus.getPhysicalWrittenDataSize(),
initialTaskStatus.getMemoryReservation(),
initialTaskStatus.getSystemMemoryReservation(),
initialTaskStatus.getRevocableMemoryReservation(),
initialTaskStatus.getFullGcCount(),
initialTaskStatus.getFullGcTime(),
dynamicFilterDomains.map(VersionedDynamicFilterDomains::getVersion).orElse(INITIAL_DYNAMIC_FILTERS_VERSION));
}
}
}
| |
/**
*============================================================================
* The Ohio State University Research Foundation, The University of Chicago -
* Argonne National Laboratory, Emory University, SemanticBits LLC,
* and Ekagra Software Technologies Ltd.
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/cagrid-portal/LICENSE.txt for details.
*============================================================================
**/
package org.json;
import java.util.Collection;
import java.util.Iterator;
import java.util.Map;
import java.io.StringWriter;
/**
* Test class. This file is not formally a member of the org.json library.
* It is just a casual test tool.
*/
public class Test {
/**
* Entry point.
* @param args
*/
public static void main(String args[]) {
Iterator it;
JSONArray a;
JSONObject j;
JSONStringer jj;
String s;
/**
* Obj is a typical class that implements JSONString. It also
* provides some beanie methods that can be used to
* construct a JSONObject. It also demonstrates constructing
* a JSONObject with an array of names.
*/
class Obj implements JSONString {
public String aString;
public double aNumber;
public boolean aBoolean;
public Obj(String string, double n, boolean b) {
this.aString = string;
this.aNumber = n;
this.aBoolean = b;
}
public double getNumber() {
return this.aNumber;
}
public String getString() {
return this.aString;
}
public boolean isBoolean() {
return this.aBoolean;
}
public String getBENT() {
return "All uppercase key";
}
public String getX() {
return "x";
}
public String toJSONString() {
return "{" + JSONObject.quote(this.aString) + ":" +
JSONObject.doubleToString(this.aNumber) + "}";
}
public String toString() {
return this.getString() + " " + this.getNumber() + " " +
this.isBoolean() + "." + this.getBENT() + " " + this.getX();
}
}
Obj obj = new Obj("A beany object", 42, true);
try {
j = XML.toJSONObject("<![CDATA[This is a collection of test patterns and examples for org.json.]]> Ignore the stuff past the end. ");
System.out.println(j.toString());
s = "{ \"list of lists\" : [ [1, 2, 3], [4, 5, 6], ] }";
j = new JSONObject(s);
System.out.println(j.toString(4));
System.out.println(XML.toString(j));
s = "<recipe name=\"bread\" prep_time=\"5 mins\" cook_time=\"3 hours\"> <title>Basic bread</title> <ingredient amount=\"8\" unit=\"dL\">Flour</ingredient> <ingredient amount=\"10\" unit=\"grams\">Yeast</ingredient> <ingredient amount=\"4\" unit=\"dL\" state=\"warm\">Water</ingredient> <ingredient amount=\"1\" unit=\"teaspoon\">Salt</ingredient> <instructions> <step>Mix all ingredients together.</step> <step>Knead thoroughly.</step> <step>Cover with a cloth, and leave for one hour in warm room.</step> <step>Knead again.</step> <step>Place in a bread baking tin.</step> <step>Cover with a cloth, and leave for one hour in warm room.</step> <step>Bake in the oven at 180(degrees)C for 30 minutes.</step> </instructions> </recipe> ";
j = XML.toJSONObject(s);
System.out.println(j.toString(4));
System.out.println();
j = JSONML.toJSONObject(s);
System.out.println(j.toString());
System.out.println(JSONML.toString(j));
System.out.println();
a = JSONML.toJSONArray(s);
System.out.println(a.toString(4));
System.out.println(JSONML.toString(a));
System.out.println();
s = "<div id=\"demo\" class=\"JSONML\"><p>JSONML is a transformation between <b>JSON</b> and <b>XML</b> that preserves ordering of document features.</p><p>JSONML can work with JSON arrays or JSON objects.</p><p>Three<br/>little<br/>words</p></div>";
j = JSONML.toJSONObject(s);
System.out.println(j.toString(4));
System.out.println(JSONML.toString(j));
System.out.println();
a = JSONML.toJSONArray(s);
System.out.println(a.toString(4));
System.out.println(JSONML.toString(a));
System.out.println();
s = "<person created=\"2006-11-11T19:23\" modified=\"2006-12-31T23:59\">\n <firstName>Robert</firstName>\n <lastName>Smith</lastName>\n <address type=\"home\">\n <street>12345 Sixth Ave</street>\n <city>Anytown</city>\n <state>CA</state>\n <postalCode>98765-4321</postalCode>\n </address>\n </person>";
j = XML.toJSONObject(s);
System.out.println(j.toString(4));
j = new JSONObject(obj);
System.out.println(j.toString());
s = "{ \"entity\": { \"imageURL\": \"\", \"name\": \"IXXXXXXXXXXXXX\", \"id\": 12336, \"ratingCount\": null, \"averageRating\": null } }";
j = new JSONObject(s);
System.out.println(j.toString(2));
jj = new JSONStringer();
s = jj
.object()
.key("single")
.value("MARIE HAA'S")
.key("Johnny")
.value("MARIE HAA\\'S")
.key("foo")
.value("bar")
.key("baz")
.array()
.object()
.key("quux")
.value("Thanks, Josh!")
.endObject()
.endArray()
.key("obj keys")
.value(JSONObject.getNames(obj))
.endObject()
.toString();
System.out.println(s);
System.out.println(new JSONStringer()
.object()
.key("a")
.array()
.array()
.array()
.value("b")
.endArray()
.endArray()
.endArray()
.endObject()
.toString());
jj = new JSONStringer();
jj.array();
jj.value(1);
jj.array();
jj.value(null);
jj.array();
jj.object();
jj.key("empty-array").array().endArray();
jj.key("answer").value(42);
jj.key("null").value(null);
jj.key("false").value(false);
jj.key("true").value(true);
jj.key("big").value(123456789e+88);
jj.key("small").value(123456789e-88);
jj.key("empty-object").object().endObject();
jj.key("long");
jj.value(9223372036854775807L);
jj.endObject();
jj.value("two");
jj.endArray();
jj.value(true);
jj.endArray();
jj.value(98.6);
jj.value(-100.0);
jj.object();
jj.endObject();
jj.object();
jj.key("one");
jj.value(1.00);
jj.endObject();
jj.value(obj);
jj.endArray();
System.out.println(jj.toString());
System.out.println(new JSONArray(jj.toString()).toString(4));
int ar[] = {1, 2, 3};
JSONArray ja = new JSONArray(ar);
System.out.println(ja.toString());
String sa[] = {"aString", "aNumber", "aBoolean"};
j = new JSONObject(obj, sa);
j.put("Testing JSONString interface", obj);
System.out.println(j.toString(4));
j = new JSONObject("{slashes: '///', closetag: '</script>', backslash:'\\\\', ei: {quotes: '\"\\''},eo: {a: '\"quoted\"', b:\"don't\"}, quotes: [\"'\", '\"']}");
System.out.println(j.toString(2));
System.out.println(XML.toString(j));
System.out.println("");
j = new JSONObject(
"{foo: [true, false,9876543210, 0.0, 1.00000001, 1.000000000001, 1.00000000000000001," +
" .00000000000000001, 2.00, 0.1, 2e100, -32,[],{}, \"string\"], " +
" to : null, op : 'Good'," +
"ten:10} postfix comment");
j.put("String", "98.6");
j.put("JSONObject", new JSONObject());
j.put("JSONArray", new JSONArray());
j.put("int", 57);
j.put("double", 123456789012345678901234567890.);
j.put("true", true);
j.put("false", false);
j.put("null", JSONObject.NULL);
j.put("bool", "true");
j.put("zero", -0.0);
j.put("\\u2028", "\u2028");
j.put("\\u2029", "\u2029");
a = j.getJSONArray("foo");
a.put(666);
a.put(2001.99);
a.put("so \"fine\".");
a.put("so <fine>.");
a.put(true);
a.put(false);
a.put(new JSONArray());
a.put(new JSONObject());
j.put("keys", JSONObject.getNames(j));
System.out.println(j.toString(4));
System.out.println(XML.toString(j));
System.out.println("String: " + j.getDouble("String"));
System.out.println(" bool: " + j.getBoolean("bool"));
System.out.println(" to: " + j.getString("to"));
System.out.println(" true: " + j.getString("true"));
System.out.println(" foo: " + j.getJSONArray("foo"));
System.out.println(" op: " + j.getString("op"));
System.out.println(" ten: " + j.getInt("ten"));
System.out.println(" oops: " + j.optBoolean("oops"));
s = "<xml one = 1 two=' \"2\" '><five></five>First \u0009<content><five></five> This is \"content\". <three> 3 </three>JSON does not preserve the sequencing of elements and contents.<three> III </three> <three> T H R E E</three><four/>Content text is an implied structure in XML. <six content=\"6\"/>JSON does not have implied structure:<seven>7</seven>everything is explicit.<![CDATA[CDATA blocks<are><supported>!]]></xml>";
j = XML.toJSONObject(s);
System.out.println(j.toString(2));
System.out.println(XML.toString(j));
System.out.println("");
ja = JSONML.toJSONArray(s);
System.out.println(ja.toString(4));
System.out.println(JSONML.toString(ja));
System.out.println("");
s = "<xml do='0'>uno<a re='1' mi='2'>dos<b fa='3'/>tres<c>true</c>quatro</a>cinqo<d>seis<e/></d></xml>";
ja = JSONML.toJSONArray(s);
System.out.println(ja.toString(4));
System.out.println(JSONML.toString(ja));
System.out.println("");
s = "<mapping><empty/> <class name = \"Customer\"> <field name = \"ID\" type = \"string\"> <bind-xml name=\"ID\" node=\"attribute\"/> </field> <field name = \"FirstName\" type = \"FirstName\"/> <field name = \"MI\" type = \"MI\"/> <field name = \"LastName\" type = \"LastName\"/> </class> <class name = \"FirstName\"> <field name = \"text\"> <bind-xml name = \"text\" node = \"text\"/> </field> </class> <class name = \"MI\"> <field name = \"text\"> <bind-xml name = \"text\" node = \"text\"/> </field> </class> <class name = \"LastName\"> <field name = \"text\"> <bind-xml name = \"text\" node = \"text\"/> </field> </class></mapping>";
j = XML.toJSONObject(s);
System.out.println(j.toString(2));
System.out.println(XML.toString(j));
System.out.println("");
ja = JSONML.toJSONArray(s);
System.out.println(ja.toString(4));
System.out.println(JSONML.toString(ja));
System.out.println("");
j = XML.toJSONObject("<?xml version=\"1.0\" ?><Book Author=\"Anonymous\"><Title>Sample Book</Title><Chapter id=\"1\">This is chapter 1. It is not very long or interesting.</Chapter><Chapter id=\"2\">This is chapter 2. Although it is longer than chapter 1, it is not any more interesting.</Chapter></Book>");
System.out.println(j.toString(2));
System.out.println(XML.toString(j));
System.out.println("");
j = XML.toJSONObject("<!DOCTYPE bCard 'http://www.cs.caltech.edu/~adam/schemas/bCard'><bCard><?xml default bCard firstname = '' lastname = '' company = '' email = '' homepage = ''?><bCard firstname = 'Rohit' lastname = 'Khare' company = 'MCI' email = 'khare@mci.net' homepage = 'http://pest.w3.org/'/><bCard firstname = 'Adam' lastname = 'Rifkin' company = 'Caltech Infospheres Project' email = 'adam@cs.caltech.edu' homepage = 'http://www.cs.caltech.edu/~adam/'/></bCard>");
System.out.println(j.toString(2));
System.out.println(XML.toString(j));
System.out.println("");
j = XML.toJSONObject("<?xml version=\"1.0\"?><customer> <firstName> <text>Fred</text> </firstName> <ID>fbs0001</ID> <lastName> <text>Scerbo</text> </lastName> <MI> <text>B</text> </MI></customer>");
System.out.println(j.toString(2));
System.out.println(XML.toString(j));
System.out.println("");
j = XML.toJSONObject("<!ENTITY tp-address PUBLIC '-//ABC University::Special Collections Library//TEXT (titlepage: name and address)//EN' 'tpspcoll.sgm'><list type='simple'><head>Repository Address </head><item>Special Collections Library</item><item>ABC University</item><item>Main Library, 40 Circle Drive</item><item>Ourtown, Pennsylvania</item><item>17654 USA</item></list>");
System.out.println(j.toString());
System.out.println(XML.toString(j));
System.out.println("");
j = XML.toJSONObject("<test intertag status=ok><empty/>deluxe<blip sweet=true>&"toot"&toot;A</blip><x>eks</x><w>bonus</w><w>bonus2</w></test>");
System.out.println(j.toString(2));
System.out.println(XML.toString(j));
System.out.println("");
j = HTTP.toJSONObject("GET / HTTP/1.0\nAccept: image/gif, image/x-xbitmap, image/jpeg, image/pjpeg, application/vnd.ms-powerpoint, application/vnd.ms-excel, application/msword, */*\nAccept-Language: en-us\nUser-Agent: Mozilla/4.0 (compatible; MSIE 5.5; Windows 98; Win 9x 4.90; T312461; Q312461)\nHost: www.nokko.com\nConnection: keep-alive\nAccept-encoding: gzip, deflate\n");
System.out.println(j.toString(2));
System.out.println(HTTP.toString(j));
System.out.println("");
j = HTTP.toJSONObject("HTTP/1.1 200 Oki Doki\nDate: Sun, 26 May 2002 17:38:52 GMT\nServer: Apache/1.3.23 (Unix) mod_perl/1.26\nKeep-Alive: timeout=15, max=100\nConnection: Keep-Alive\nTransfer-Encoding: chunked\nContent-Type: text/html\n");
System.out.println(j.toString(2));
System.out.println(HTTP.toString(j));
System.out.println("");
j = new JSONObject("{nix: null, nux: false, null: 'null', 'Request-URI': '/', Method: 'GET', 'HTTP-Version': 'HTTP/1.0'}");
System.out.println(j.toString(2));
System.out.println("isNull: " + j.isNull("nix"));
System.out.println(" has: " + j.has("nix"));
System.out.println(XML.toString(j));
System.out.println(HTTP.toString(j));
System.out.println("");
j = XML.toJSONObject("<?xml version='1.0' encoding='UTF-8'?>"+"\n\n"+"<SOAP-ENV:Envelope"+
" xmlns:SOAP-ENV=\"http://schemas.xmlsoap.org/soap/envelope/\""+
" xmlns:xsi=\"http://www.w3.org/1999/XMLSchema-instance\""+
" xmlns:xsd=\"http://www.w3.org/1999/XMLSchema\">"+
"<SOAP-ENV:Body><ns1:doGoogleSearch"+
" xmlns:ns1=\"urn:GoogleSearch\""+
" SOAP-ENV:encodingStyle=\"http://schemas.xmlsoap.org/soap/encoding/\">"+
"<key xsi:type=\"xsd:string\">GOOGLEKEY</key> <q"+
" xsi:type=\"xsd:string\">'+search+'</q> <start"+
" xsi:type=\"xsd:int\">0</start> <maxResults"+
" xsi:type=\"xsd:int\">10</maxResults> <filter"+
" xsi:type=\"xsd:boolean\">true</filter> <restrict"+
" xsi:type=\"xsd:string\"></restrict> <safeSearch"+
" xsi:type=\"xsd:boolean\">false</safeSearch> <lr"+
" xsi:type=\"xsd:string\"></lr> <ie"+
" xsi:type=\"xsd:string\">latin1</ie> <oe"+
" xsi:type=\"xsd:string\">latin1</oe>"+
"</ns1:doGoogleSearch>"+
"</SOAP-ENV:Body></SOAP-ENV:Envelope>");
System.out.println(j.toString(2));
System.out.println(XML.toString(j));
System.out.println("");
j = new JSONObject("{Envelope: {Body: {\"ns1:doGoogleSearch\": {oe: \"latin1\", filter: true, q: \"'+search+'\", key: \"GOOGLEKEY\", maxResults: 10, \"SOAP-ENV:encodingStyle\": \"http://schemas.xmlsoap.org/soap/encoding/\", start: 0, ie: \"latin1\", safeSearch:false, \"xmlns:ns1\": \"urn:GoogleSearch\"}}}}");
System.out.println(j.toString(2));
System.out.println(XML.toString(j));
System.out.println("");
j = CookieList.toJSONObject(" f%oo = b+l=ah ; o;n%40e = t.wo ");
System.out.println(j.toString(2));
System.out.println(CookieList.toString(j));
System.out.println("");
j = Cookie.toJSONObject("f%oo=blah; secure ;expires = April 24, 2002");
System.out.println(j.toString(2));
System.out.println(Cookie.toString(j));
System.out.println("");
j = new JSONObject("{script: 'It is not allowed in HTML to send a close script tag in a string<script>because it confuses browsers</script>so we insert a backslash before the /'}");
System.out.println(j.toString());
System.out.println("");
JSONTokener jt = new JSONTokener("{op:'test', to:'session', pre:1}{op:'test', to:'session', pre:2}");
j = new JSONObject(jt);
System.out.println(j.toString());
System.out.println("pre: " + j.optInt("pre"));
int i = jt.skipTo('{');
System.out.println(i);
j = new JSONObject(jt);
System.out.println(j.toString());
System.out.println("");
a = CDL.toJSONArray("Comma delimited list test, '\"Strip\"Quotes', 'quote, comma', No quotes, 'Single Quotes', \"Double Quotes\"\n1,'2',\"3\"\n,'It is \"good,\"', \"It works.\"\n\n");
s = CDL.toString(a);
System.out.println(s);
System.out.println("");
System.out.println(a.toString(4));
System.out.println("");
a = CDL.toJSONArray(s);
System.out.println(a.toString(4));
System.out.println("");
a = new JSONArray(" [\"<escape>\", next is an implied null , , ok,] ");
System.out.println(a.toString());
System.out.println("");
System.out.println(XML.toString(a));
System.out.println("");
j = new JSONObject("{ fun => with non-standard forms ; forgiving => This package can be used to parse formats that are similar to but not stricting conforming to JSON; why=To make it easier to migrate existing data to JSON,one = [[1.00]]; uno=[[{1=>1}]];'+':+6e66 ;pluses=+++;empty = '' , 'double':0.666,true: TRUE, false: FALSE, null=NULL;[true] = [[!,@;*]]; string=> o. k. ; \r oct=0666; hex=0x666; dec=666; o=0999; noh=0x0x}");
System.out.println(j.toString(4));
System.out.println("");
if (j.getBoolean("true") && !j.getBoolean("false")) {
System.out.println("It's all good");
}
System.out.println("");
j = new JSONObject(j, new String[]{"dec", "oct", "hex", "missing"});
System.out.println(j.toString(4));
System.out.println("");
System.out.println(new JSONStringer().array().value(a).value(j).endArray());
j = new JSONObject("{string: \"98.6\", long: 2147483648, int: 2147483647, longer: 9223372036854775807, double: 9223372036854775808}");
System.out.println(j.toString(4));
System.out.println("\ngetInt");
System.out.println("int " + j.getInt("int"));
System.out.println("long " + j.getInt("long"));
System.out.println("longer " + j.getInt("longer"));
System.out.println("double " + j.getInt("double"));
System.out.println("string " + j.getInt("string"));
System.out.println("\ngetLong");
System.out.println("int " + j.getLong("int"));
System.out.println("long " + j.getLong("long"));
System.out.println("longer " + j.getLong("longer"));
System.out.println("double " + j.getLong("double"));
System.out.println("string " + j.getLong("string"));
System.out.println("\ngetDouble");
System.out.println("int " + j.getDouble("int"));
System.out.println("long " + j.getDouble("long"));
System.out.println("longer " + j.getDouble("longer"));
System.out.println("double " + j.getDouble("double"));
System.out.println("string " + j.getDouble("string"));
j.put("good sized", 9223372036854775807L);
System.out.println(j.toString(4));
a = new JSONArray("[2147483647, 2147483648, 9223372036854775807, 9223372036854775808]");
System.out.println(a.toString(4));
System.out.println("\nKeys: ");
it = j.keys();
while (it.hasNext()) {
s = (String)it.next();
System.out.println(s + ": " + j.getString(s));
}
System.out.println("\naccumulate: ");
j = new JSONObject();
j.accumulate("stooge", "Curly");
j.accumulate("stooge", "Larry");
j.accumulate("stooge", "Moe");
a = j.getJSONArray("stooge");
a.put(5, "Shemp");
System.out.println(j.toString(4));
System.out.println("\nwrite:");
System.out.println(j.write(new StringWriter()));
s = "<xml empty><a></a><a>1</a><a>22</a><a>333</a></xml>";
j = XML.toJSONObject(s);
System.out.println(j.toString(4));
System.out.println(XML.toString(j));
s = "<book><chapter>Content of the first chapter</chapter><chapter>Content of the second chapter <chapter>Content of the first subchapter</chapter> <chapter>Content of the second subchapter</chapter></chapter><chapter>Third Chapter</chapter></book>";
j = XML.toJSONObject(s);
System.out.println(j.toString(4));
System.out.println(XML.toString(j));
a = JSONML.toJSONArray(s);
System.out.println(a.toString(4));
System.out.println(JSONML.toString(a));
Collection c = null;
Map m = null;
j = new JSONObject(m);
a = new JSONArray(c);
j.append("stooge", "Joe DeRita");
j.append("stooge", "Shemp");
j.accumulate("stooges", "Curly");
j.accumulate("stooges", "Larry");
j.accumulate("stooges", "Moe");
j.accumulate("stoogearray", j.get("stooges"));
j.put("map", m);
j.put("collection", c);
j.put("array", a);
a.put(m);
a.put(c);
System.out.println(j.toString(4));
s = "{plist=Apple; AnimalSmells = { pig = piggish; lamb = lambish; worm = wormy; }; AnimalSounds = { pig = oink; lamb = baa; worm = baa; Lisa = \"Why is the worm talking like a lamb?\" } ; AnimalColors = { pig = pink; lamb = black; worm = pink; } } ";
j = new JSONObject(s);
System.out.println(j.toString(4));
s = " (\"San Francisco\", \"New York\", \"Seoul\", \"London\", \"Seattle\", \"Shanghai\")";
a = new JSONArray(s);
System.out.println(a.toString());
s = "<a ichi='1' ni='2'><b>The content of b</b> and <c san='3'>The content of c</c><d>do</d><e></e><d>re</d><f/><d>mi</d></a>";
j = XML.toJSONObject(s);
System.out.println(j.toString(2));
System.out.println(XML.toString(j));
System.out.println("");
ja = JSONML.toJSONArray(s);
System.out.println(ja.toString(4));
System.out.println(JSONML.toString(ja));
System.out.println("");
System.out.println("\nTesting Exceptions: ");
System.out.print("Exception: ");
try {
a = new JSONArray();
a.put(Double.NEGATIVE_INFINITY);
a.put(Double.NaN);
System.out.println(a.toString());
} catch (Exception e) {
System.out.println(e);
}
System.out.print("Exception: ");
try {
System.out.println(j.getDouble("stooge"));
} catch (Exception e) {
System.out.println(e);
}
System.out.print("Exception: ");
try {
System.out.println(j.getDouble("howard"));
} catch (Exception e) {
System.out.println(e);
}
System.out.print("Exception: ");
try {
System.out.println(j.put(null, "howard"));
} catch (Exception e) {
System.out.println(e);
}
System.out.print("Exception: ");
try {
System.out.println(a.getDouble(0));
} catch (Exception e) {
System.out.println(e);
}
System.out.print("Exception: ");
try {
System.out.println(a.get(-1));
} catch (Exception e) {
System.out.println(e);
}
System.out.print("Exception: ");
try {
System.out.println(a.put(Double.NaN));
} catch (Exception e) {
System.out.println(e);
}
System.out.print("Exception: ");
try {
j = XML.toJSONObject("<a><b> ");
} catch (Exception e) {
System.out.println(e);
}
System.out.print("Exception: ");
try {
j = XML.toJSONObject("<a></b> ");
} catch (Exception e) {
System.out.println(e);
}
System.out.print("Exception: ");
try {
j = XML.toJSONObject("<a></a ");
} catch (Exception e) {
System.out.println(e);
}
System.out.print("Exception: ");
try {
ja = new JSONArray(new Object());
System.out.println(ja.toString());
} catch (Exception e) {
System.out.println(e);
}
System.out.print("Exception: ");
try {
s = "[)";
a = new JSONArray(s);
System.out.println(a.toString());
} catch (Exception e) {
System.out.println(e);
}
System.out.print("Exception: ");
try {
s = "<xml";
ja = JSONML.toJSONArray(s);
System.out.println(ja.toString(4));
} catch (Exception e) {
System.out.println(e);
}
System.out.print("Exception: ");
try {
s = "<right></wrong>";
ja = JSONML.toJSONArray(s);
System.out.println(ja.toString(4));
} catch (Exception e) {
System.out.println(e);
}
System.out.print("Exception: ");
try {
s = "{\"koda\": true, \"koda\": true}";
j = new JSONObject(s);
System.out.println(j.toString(4));
} catch (Exception e) {
System.out.println(e);
}
System.out.print("Exception: ");
try {
jj = new JSONStringer();
s = jj
.object()
.key("bosanda")
.value("MARIE HAA'S")
.key("bosanda")
.value("MARIE HAA\\'S")
.endObject()
.toString();
System.out.println(j.toString(4));
} catch (Exception e) {
System.out.println(e);
}
} catch (Exception e) {
System.out.println(e.toString());
}
}
}
| |
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.settings.voice;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.pm.ResolveInfo;
import android.content.pm.ServiceInfo;
import android.content.res.Resources;
import android.content.res.TypedArray;
import android.content.res.XmlResourceParser;
import android.provider.Settings;
import android.service.voice.VoiceInteractionService;
import android.service.voice.VoiceInteractionServiceInfo;
import android.speech.RecognitionService;
import android.util.ArraySet;
import android.util.AttributeSet;
import android.util.Log;
import android.util.Xml;
import org.xmlpull.v1.XmlPullParser;
import org.xmlpull.v1.XmlPullParserException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
public final class VoiceInputHelper {
    static final String TAG = "VoiceInputHelper";

    final Context mContext;
    // Resolved VoiceInteractionService implementations (queried with GET_META_DATA).
    final List<ResolveInfo> mAvailableVoiceInteractions;
    // Resolved RecognitionService implementations (queried with GET_META_DATA).
    final List<ResolveInfo> mAvailableRecognition;
static public class BaseInfo implements Comparable {
public final ServiceInfo service;
public final ComponentName componentName;
public final String key;
public final ComponentName settings;
public final CharSequence label;
public final String labelStr;
public final CharSequence appLabel;
public BaseInfo(PackageManager pm, ServiceInfo _service, String _settings) {
service = _service;
componentName = new ComponentName(_service.packageName, _service.name);
key = componentName.flattenToShortString();
settings = _settings != null
? new ComponentName(_service.packageName, _settings) : null;
label = _service.loadLabel(pm);
labelStr = label.toString();
appLabel = _service.applicationInfo.loadLabel(pm);
}
@Override
public int compareTo(Object another) {
return labelStr.compareTo(((BaseInfo)another).labelStr);
}
}
static public class InteractionInfo extends BaseInfo {
public final VoiceInteractionServiceInfo serviceInfo;
public InteractionInfo(PackageManager pm, VoiceInteractionServiceInfo _service) {
super(pm, _service.getServiceInfo(), _service.getSettingsActivity());
serviceInfo = _service;
}
}
static public class RecognizerInfo extends BaseInfo {
public RecognizerInfo(PackageManager pm, ServiceInfo _service, String _settings) {
super(pm, _service, _settings);
}
}
    // Populated by buildUi(): interactor entries (sorted) and recognizer entries.
    final ArrayList<InteractionInfo> mAvailableInteractionInfos = new ArrayList<>();
    final ArrayList<RecognizerInfo> mAvailableRecognizerInfos = new ArrayList<>();

    // Currently selected components, read from Settings.Secure in buildUi(); null when unset.
    ComponentName mCurrentVoiceInteraction;
    ComponentName mCurrentRecognizer;
public VoiceInputHelper(Context context) {
mContext = context;
mAvailableVoiceInteractions = mContext.getPackageManager().queryIntentServices(
new Intent(VoiceInteractionService.SERVICE_INTERFACE),
PackageManager.GET_META_DATA);
mAvailableRecognition = mContext.getPackageManager().queryIntentServices(
new Intent(RecognitionService.SERVICE_INTERFACE),
PackageManager.GET_META_DATA);
}
public boolean hasItems() {
return mAvailableVoiceInteractions.size() > 0 || mAvailableRecognition.size() > 0;
}
/**
 * Populates {@link #mAvailableInteractionInfos} and {@link #mAvailableRecognizerInfos}
 * from the services discovered in the constructor, and resolves the currently
 * selected interactor/recognizer components from their secure settings.
 */
public void buildUi() {
// Get the currently selected interactor from the secure setting.
String currentSetting = Settings.Secure.getString(
mContext.getContentResolver(), Settings.Secure.VOICE_INTERACTION_SERVICE);
if (currentSetting != null && !currentSetting.isEmpty()) {
mCurrentVoiceInteraction = ComponentName.unflattenFromString(currentSetting);
} else {
mCurrentVoiceInteraction = null;
}
// Recognizer components that belong to an interactor, collected while scanning interactors.
ArraySet<ComponentName> interactorRecognizers = new ArraySet<>();
// Iterate through all the available interactors and load up their info to show
// in the preference.
int size = mAvailableVoiceInteractions.size();
for (int i = 0; i < size; i++) {
ResolveInfo resolveInfo = mAvailableVoiceInteractions.get(i);
VoiceInteractionServiceInfo info = new VoiceInteractionServiceInfo(
mContext.getPackageManager(), resolveInfo.serviceInfo);
if (info.getParseError() != null) {
// Skip services with malformed meta-data; log and continue with the rest.
Log.w("VoiceInteractionService", "Error in VoiceInteractionService "
+ resolveInfo.serviceInfo.packageName + "/"
+ resolveInfo.serviceInfo.name + ": " + info.getParseError());
continue;
}
mAvailableInteractionInfos.add(new InteractionInfo(mContext.getPackageManager(), info));
interactorRecognizers.add(new ComponentName(resolveInfo.serviceInfo.packageName,
info.getRecognitionService()));
}
Collections.sort(mAvailableInteractionInfos);
// Get the currently selected recognizer from the secure setting.
currentSetting = Settings.Secure.getString(
mContext.getContentResolver(), Settings.Secure.VOICE_RECOGNITION_SERVICE);
if (currentSetting != null && !currentSetting.isEmpty()) {
mCurrentRecognizer = ComponentName.unflattenFromString(currentSetting);
} else {
mCurrentRecognizer = null;
}
// Iterate through all the available recognizers and load up their info to show
// in the preference.
size = mAvailableRecognition.size();
for (int i = 0; i < size; i++) {
ResolveInfo resolveInfo = mAvailableRecognition.get(i);
ComponentName comp = new ComponentName(resolveInfo.serviceInfo.packageName,
resolveInfo.serviceInfo.name);
// NOTE(review): the skip below is disabled (the 'continue' is commented out), so
// recognizers bundled with an interactor are still listed — confirm this is intentional.
if (interactorRecognizers.contains(comp)) {
//continue;
}
ServiceInfo si = resolveInfo.serviceInfo;
XmlResourceParser parser = null;
String settingsActivity = null;
// Parse the recognizer's meta-data XML to extract its optional settings activity;
// parse failures are logged and leave settingsActivity null.
try {
parser = si.loadXmlMetaData(mContext.getPackageManager(),
RecognitionService.SERVICE_META_DATA);
if (parser == null) {
throw new XmlPullParserException("No " + RecognitionService.SERVICE_META_DATA +
" meta-data for " + si.packageName);
}
Resources res = mContext.getPackageManager().getResourcesForApplication(
si.applicationInfo);
AttributeSet attrs = Xml.asAttributeSet(parser);
int type;
// Advance the parser to the first start tag (or end of document).
while ((type=parser.next()) != XmlPullParser.END_DOCUMENT
&& type != XmlPullParser.START_TAG) {
}
String nodeName = parser.getName();
if (!"recognition-service".equals(nodeName)) {
throw new XmlPullParserException(
"Meta-data does not start with recognition-service tag");
}
TypedArray array = res.obtainAttributes(attrs,
com.android.internal.R.styleable.RecognitionService);
settingsActivity = array.getString(
com.android.internal.R.styleable.RecognitionService_settingsActivity);
array.recycle();
} catch (XmlPullParserException e) {
Log.e(TAG, "error parsing recognition service meta-data", e);
} catch (IOException e) {
Log.e(TAG, "error parsing recognition service meta-data", e);
} catch (PackageManager.NameNotFoundException e) {
Log.e(TAG, "error parsing recognition service meta-data", e);
} finally {
if (parser != null) parser.close();
}
mAvailableRecognizerInfos.add(new RecognizerInfo(mContext.getPackageManager(),
resolveInfo.serviceInfo, settingsActivity));
}
Collections.sort(mAvailableRecognizerInfos);
}
}
| |
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.vcs.history;
import com.intellij.CommonBundle;
import com.intellij.icons.AllIcons;
import com.intellij.ide.CopyProvider;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.VcsInternalDataKeys;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.editor.colors.EditorColorsListener;
import com.intellij.openapi.editor.colors.EditorColorsScheme;
import com.intellij.openapi.fileEditor.OpenFileDescriptor;
import com.intellij.openapi.ide.CopyPasteManager;
import com.intellij.openapi.project.DumbAware;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.Splitter;
import com.intellij.openapi.util.Clock;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Getter;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vcs.*;
import com.intellij.openapi.vcs.changes.ByteBackedContentRevision;
import com.intellij.openapi.vcs.changes.Change;
import com.intellij.openapi.vcs.changes.ContentRevision;
import com.intellij.openapi.vcs.changes.CurrentContentRevision;
import com.intellij.openapi.vcs.changes.issueLinks.IssueLinkRenderer;
import com.intellij.openapi.vcs.changes.issueLinks.TableLinkMouseListener;
import com.intellij.openapi.vcs.impl.VcsBackgroundableActions;
import com.intellij.openapi.vcs.vfs.VcsFileSystem;
import com.intellij.openapi.vcs.vfs.VcsVirtualFile;
import com.intellij.openapi.vcs.vfs.VcsVirtualFolder;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.wm.IdeFocusManager;
import com.intellij.ui.*;
import com.intellij.ui.dualView.CellWrapper;
import com.intellij.ui.dualView.DualView;
import com.intellij.ui.dualView.DualViewColumnInfo;
import com.intellij.ui.dualView.TreeTableView;
import com.intellij.ui.speedSearch.SpeedSearchUtil;
import com.intellij.ui.table.TableView;
import com.intellij.util.ArrayUtil;
import com.intellij.util.Consumer;
import com.intellij.util.PlatformIcons;
import com.intellij.util.TreeItem;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.text.DateFormatUtil;
import com.intellij.util.ui.ColumnInfo;
import com.intellij.util.ui.StatusText;
import com.intellij.util.ui.UIUtil;
import com.intellij.vcsUtil.VcsUtil;
import org.jetbrains.annotations.CalledInAwt;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.table.TableCellRenderer;
import javax.swing.table.TableModel;
import javax.swing.tree.TreeCellRenderer;
import javax.swing.tree.TreePath;
import java.awt.*;
import java.awt.datatransfer.StringSelection;
import java.io.IOException;
import java.util.List;
import java.util.*;
import static java.util.Comparator.comparing;
import static java.util.Comparator.reverseOrder;
/**
* author: lesya
*/
public class FileHistoryPanelImpl extends JPanel implements DataProvider, Disposable, EditorColorsListener, CopyProvider {
// IDs of externally registered action groups shown in the popup / toolbar.
private static final String VCS_HISTORY_POPUP_ACTION_GROUP = "VcsHistoryInternalGroup.Popup";
private static final String VCS_HISTORY_TOOLBAR_ACTION_GROUP = "VcsHistoryInternalGroup.Toolbar";
// Data key answered by getData(): revision to diff against (see PREVIOUS_REVISION_FOR_DIFF branch).
public static final DataKey<VcsFileRevision> PREVIOUS_REVISION_FOR_DIFF = DataKey.create("PREVIOUS_VCS_FILE_REVISION_FOR_DIFF");
private final String myHelpId;
@NotNull private final AbstractVcs myVcs;
private final VcsHistoryProvider myProvider;
@NotNull private final FileHistoryRefresherI myRefresherI;
// File (and optional starting revision) whose history is displayed; together they identify the panel.
@NotNull private final FilePath myFilePath;
@Nullable private final VcsRevisionNumber myStartingRevision;
// Revision number -> index in the session's revision list, rebuilt by refreshRevisionsOrder().
@NotNull private final Map<VcsRevisionNumber, Integer> myRevisionsOrder = new HashMap<>();
// Lazy cache of per-revision virtual files, filled by createVirtualFileForRevision().
@NotNull private final Map<VcsFileRevision, VirtualFile> myRevisionToVirtualFile = new HashMap<>();
@NotNull private final DetailsPanel myDetails;
// Flat-table / tree dual presentation of the revision list.
@NotNull private final DualView myDualView;
@Nullable private final JComponent myAdditionalDetails;
@Nullable private final Consumer<VcsFileRevision> myRevisionSelectionListener;
@NotNull private VcsHistorySession myHistorySession;
private VcsFileRevision myBottomRevisionForShowDiff;
// Selection to restore after a refresh; captured in setHistorySession(), cleared in finishRefresh().
private List<Object> myTargetSelection;
private boolean myIsStaticAndEmbedded;
private Splitter myDetailsSplitter;
private Splitter mySplitter;
/**
 * Convenience constructor for showing history from the file's current state
 * (no explicit starting revision).
 */
public FileHistoryPanelImpl(@NotNull AbstractVcs vcs,
@NotNull FilePath filePath,
@NotNull VcsHistorySession session,
VcsHistoryProvider provider,
@NotNull FileHistoryRefresherI refresherI,
final boolean isStaticEmbedded) {
this(vcs, filePath, null, session, provider, refresherI, isStaticEmbedded);
}
/**
 * Builds the whole history panel: columns, dual (tree/flat) view, speed search,
 * popup and toolbar action groups, and the center splitter with the details pane.
 *
 * @param startingRevision optional revision the history was requested from; null means current state
 * @param isStaticEmbedded when true the panel is rendered in its compact embedded form
 */
public FileHistoryPanelImpl(@NotNull AbstractVcs vcs,
@NotNull FilePath filePath,
@Nullable VcsRevisionNumber startingRevision,
@NotNull VcsHistorySession session,
VcsHistoryProvider provider,
@NotNull FileHistoryRefresherI refresherI,
final boolean isStaticEmbedded) {
super(new BorderLayout());
myHelpId = provider.getHelpId() != null ? provider.getHelpId() : "reference.versionControl.toolwindow.history";
myIsStaticAndEmbedded = false;
myVcs = vcs;
myProvider = provider;
myRefresherI = refresherI;
myHistorySession = session;
myFilePath = filePath;
myStartingRevision = startingRevision;
myDetails = new DetailsPanel(vcs.getProject());
refreshRevisionsOrder();
final VcsDependentHistoryComponents components = provider.getUICustomization(session, this);
myAdditionalDetails = components.getDetailsComponent();
myRevisionSelectionListener = components.getRevisionListener();
final DualViewColumnInfo[] columns = createColumnList(vcs.getProject(), provider, components.getColumns());
// Column widths are persisted per history-provider class.
@NonNls String storageKey = "FileHistory." + provider.getClass().getName();
final HistoryAsTreeProvider treeHistoryProvider = myHistorySession.getHistoryAsTreeProvider();
// Providers that expose a tree structure get the tree mode; others are forced flat.
if (treeHistoryProvider != null) {
myDualView = new DualView(new TreeNodeOnVcsRevision(null, treeHistoryProvider.createTreeOn(myHistorySession.getRevisionList())),
columns, storageKey, myVcs.getProject());
}
else {
myDualView =
new DualView(new TreeNodeOnVcsRevision(null, ContainerUtil.map(myHistorySession.getRevisionList(), TreeItem::new)), columns,
storageKey, myVcs.getProject());
myDualView.switchToTheFlatMode();
}
new TableSpeedSearch(myDualView.getFlatView()).setComparator(new SpeedSearchComparator(false));
final TableLinkMouseListener listener = new TableLinkMouseListener();
listener.installOn(myDualView.getFlatView());
listener.installOn(myDualView.getTreeView());
myDualView.setEmptyText(CommonBundle.getLoadingTreeNodeText());
setupDualView(fillActionGroup(true, new DefaultActionGroup()));
if (isStaticEmbedded) {
setIsStaticAndEmbedded(true);
}
DefaultActionGroup toolbarGroup = new DefaultActionGroup();
fillActionGroup(false, toolbarGroup);
ActionToolbar toolbar = ActionManager.getInstance().createActionToolbar(ActionPlaces.FILEHISTORY_VIEW_TOOLBAR, toolbarGroup,
isStaticEmbedded);
JComponent centerPanel = createCenterPanel();
toolbar.setTargetComponent(centerPanel);
// Make toolbar-action shortcuts work while focus is anywhere inside the panel.
for (AnAction action : toolbarGroup.getChildren(null)) {
action.registerCustomShortcutSet(action.getShortcutSet(), centerPanel);
}
add(centerPanel, BorderLayout.CENTER);
// Embedded mode puts the toolbar on top; the standalone tool window puts it on the left.
add(toolbar.getComponent(), isStaticEmbedded ? BorderLayout.NORTH : BorderLayout.WEST);
chooseView();
Disposer.register(vcs.getProject(), this);
}
/**
 * Applies a bold variant of the current font to the given component.
 * A {@link JComponent} gets its own font bolded (no recursion into its children);
 * any other {@link Container} is descended into recursively.
 */
private static void makeBold(Component component) {
    // Note: JComponent is itself a Container, so this branch must come first.
    if (component instanceof JComponent) {
        JComponent jComponent = (JComponent)component;
        Font font = jComponent.getFont();
        if (font != null) {
            jComponent.setFont(font.deriveFont(Font.BOLD));
        }
        return;
    }
    if (component instanceof Container) {
        for (Component child : ((Container)component).getComponents()) {
            makeBold(child);
        }
    }
}
/**
 * Builds a one-line human-readable description of a revision:
 * short revision number, author, date/time, optional committer and commit subject.
 *
 * @param withMessage when true the commit subject is appended at the end
 */
@NotNull
public static String getPresentableText(@NotNull VcsFileRevision revision, boolean withMessage) {
    // implementation reflected by com.intellij.vcs.log.ui.frame.VcsLogGraphTable.getPresentableText()
    long time = revision.getRevisionDate().getTime();
    StringBuilder sb = new StringBuilder();
    sb.append(VcsUtil.getShortRevisionString(revision.getRevisionNumber()));
    sb.append(" ");
    sb.append(revision.getAuthor());
    sb.append(" on ").append(DateFormatUtil.formatDate(time));
    sb.append(" at ").append(DateFormatUtil.formatTime(time));
    if (revision instanceof VcsFileRevisionEx) {
        VcsFileRevisionEx ex = (VcsFileRevisionEx)revision;
        // Mention the committer only when it differs from the author.
        if (!Comparing.equal(revision.getAuthor(), ex.getCommitterName())) {
            sb.append(" (committed by ").append(ex.getCommitterName()).append(")");
        }
    }
    if (withMessage) {
        sb.append(" ").append(MessageColumnInfo.getSubject(revision));
    }
    return sb.toString();
}
/**
 * Checks if the given historyPanel shows the history for given path and revision number.
 *
 * @param filePath2         path to compare with the panel's own path
 * @param startingRevision2 starting revision to compare with the panel's own (null = current state)
 */
static boolean sameHistories(@NotNull FileHistoryPanelImpl historyPanel,
@NotNull FilePath filePath2,
@Nullable VcsRevisionNumber startingRevision2) {
return sameHistories(historyPanel.myFilePath, historyPanel.myStartingRevision, filePath2, startingRevision2);
}
/**
 * Two histories are the same when the paths are equal and the starting revisions
 * match by their string form (both null counts as a match).
 */
public static boolean sameHistories(@NotNull FilePath filePath1, @Nullable VcsRevisionNumber startingRevision1,
                                    @NotNull FilePath filePath2, @Nullable VcsRevisionNumber startingRevision2) {
    if (!filePath1.equals(filePath2)) return false;
    // Compare revisions via asString() so that equivalent representations match.
    String revision1 = startingRevision1 == null ? null : startingRevision1.asString();
    String revision2 = startingRevision2 == null ? null : startingRevision2.asString();
    return Comparing.equal(revision1, revision2);
}
/**
 * Assembles the column set: revision (sorted by the session's revision order),
 * optional date, author, provider-specific columns, then the commit message.
 *
 * @param additionalColumns extra columns supplied by the history provider, may be null
 */
private DualViewColumnInfo @NotNull [] createColumnList(@NotNull Project project,
@NotNull VcsHistoryProvider provider,
ColumnInfo @Nullable [] additionalColumns) {
ArrayList<DualViewColumnInfo> columns = new ArrayList<>();
// Revision column sorts by the position recorded in myRevisionsOrder (newest first).
columns.add(new TreeNodeColumnInfoWrapper<>(
new RevisionColumnInfo(comparing(revision -> myRevisionsOrder.get(revision.getRevisionNumber()), reverseOrder()))));
if (!provider.isDateOmittable()) columns.add(new TreeNodeColumnInfoWrapper<>(new DateColumnInfo()));
columns.add(new TreeNodeColumnInfoWrapper<>(new AuthorColumnInfo()));
if (additionalColumns != null) {
for (ColumnInfo additionalColumn : additionalColumns) {
// NOTE(review): raw TreeNodeColumnInfoWrapper — the element type of the provider's
// columns is unknown here, so the unchecked construction appears deliberate.
columns.add(new TreeNodeColumnInfoWrapper(additionalColumn));
}
}
columns.add(new TreeNodeColumnInfoWrapper<>(new MessageColumnInfo(project)));
return columns.toArray(new DualViewColumnInfo[0]);
}
/**
 * Replaces the displayed history session, preserving (and later restoring) the
 * current flat-view selection across the rebuild. Must be called on the EDT.
 */
@CalledInAwt
public void setHistorySession(@NotNull VcsHistorySession session) {
// Capture the selection only once per refresh cycle; finishRefresh() clears it.
if (myTargetSelection == null) {
myTargetSelection = myDualView.getFlatView().getSelectedObjects();
}
myHistorySession = session;
refreshRevisionsOrder();
HistoryAsTreeProvider treeHistoryProvider = session.getHistoryAsTreeProvider();
if (myHistorySession.getRevisionList().isEmpty()) {
adjustEmptyText();
}
// Rebuild the model as a tree when the provider supports it, flat otherwise.
if (treeHistoryProvider != null) {
myDualView.setRoot(new TreeNodeOnVcsRevision(null,
treeHistoryProvider.createTreeOn(myHistorySession.getRevisionList())),
myTargetSelection);
}
else {
myDualView.setRoot(new TreeNodeOnVcsRevision(null, ContainerUtil.map(myHistorySession.getRevisionList(), TreeItem::new)),
myTargetSelection);
}
mySplitter.revalidate();
mySplitter.repaint();
myDualView.expandAll();
myDualView.repaint();
}
/**
 * Completes a refresh cycle: scrolls the tree view to its last row (when history
 * is shown as a tree), drops the remembered selection and repaints the splitter.
 * Must be called on the EDT.
 */
@CalledInAwt
public void finishRefresh() {
    if (myHistorySession.getHistoryAsTreeProvider() != null) {
        // scroll tree view to most recent change
        TreeTableView treeView = myDualView.getTreeView();
        int lastRow = treeView.getRowCount() - 1;
        if (lastRow >= 0) {
            Rectangle lastCell = treeView.getCellRect(lastRow, 0, true);
            treeView.scrollRectToVisible(lastCell);
        }
    }
    myTargetSelection = null;
    mySplitter.revalidate();
    mySplitter.repaint();
}
/**
 * Picks the empty-table message: "file not found" when the path is gone,
 * "loading" while a history session is being built, default text otherwise.
 */
private void adjustEmptyText() {
    VirtualFile virtualFile = myFilePath.getVirtualFile();
    boolean fileExists = (virtualFile != null && virtualFile.isValid()) || myFilePath.getIOFile().exists();
    if (!fileExists) {
        myDualView.setEmptyText("File " + myFilePath.getName() + " not found");
        return;
    }
    boolean loading = VcsCachingHistory.getHistoryLock(myVcs, VcsBackgroundableActions.CREATE_HISTORY_SESSION, myFilePath, myStartingRevision)
        .isLocked();
    myDualView.setEmptyText(loading ? CommonBundle.getLoadingTreeNodeText() : StatusText.getDefaultEmptyText());
}
/**
 * Wires up the dual view: popup menus on both presentations, focus, selection
 * listener, custom renderers, a double-click diff action and default
 * descending sort on the first (revision) column.
 */
private void setupDualView(@NotNull DefaultActionGroup group) {
myDualView.setShowGrid(true);
PopupHandler.installPopupHandler(myDualView.getTreeView(), group, ActionPlaces.UPDATE_POPUP, ActionManager.getInstance());
PopupHandler.installPopupHandler(myDualView.getFlatView(), group, ActionPlaces.UPDATE_POPUP, ActionManager.getInstance());
IdeFocusManager.getGlobalInstance().doWhenFocusSettlesDown(() -> IdeFocusManager.getGlobalInstance().requestFocus(myDualView, true));
myDualView.addListSelectionListener(e -> updateMessage());
myDualView.setRootVisible(false);
myDualView.expandAll();
// Renderers pull the current session via a getter so they always see the latest one.
myDualView.setTreeCellRenderer(new MyTreeCellRenderer(myDualView.getTree().getCellRenderer(), () -> myHistorySession));
myDualView.setCellWrapper(new MyCellWrapper(() -> myHistorySession));
myDualView.installDoubleClickHandler(EmptyAction.wrap(ActionManager.getInstance().getAction(IdeActions.ACTION_SHOW_DIFF_COMMON)));
myDualView.getFlatView().getTableViewModel().setSortable(true);
RowSorter<? extends TableModel> rowSorter = myDualView.getFlatView().getRowSorter();
if (rowSorter != null) {
// Newest revisions first by default.
rowSorter.setSortKeys(Collections.singletonList(new RowSorter.SortKey(0, SortOrder.DESCENDING)));
}
}
/**
 * Pushes the current selection into the details pane and notifies the optional
 * revision-selection listener with the first selected revision.
 */
private void updateMessage() {
    //noinspection unchecked
    List<TreeNodeOnVcsRevision> selection = (List<TreeNodeOnVcsRevision>)myDualView.getSelection();
    // The details pane is updated even for an empty selection (it clears itself).
    myDetails.update(selection);
    if (!selection.isEmpty() && myRevisionSelectionListener != null) {
        myRevisionSelectionListener.consume(selection.get(0).getRevision());
    }
}
/**
 * Builds the center area: dual view on top, and below it a horizontal splitter
 * with the details pane and the provider's additional details component.
 */
@NotNull
protected JComponent createCenterPanel() {
mySplitter = new OnePixelSplitter(true, "vcs.history.splitter.proportion", 0.6f);
mySplitter.setFirstComponent(myDualView);
JScrollPane scrollPane = ScrollPaneFactory.createScrollPane(myDetails);
scrollPane.setBorder(IdeBorderFactory.createBorder(SideBorder.LEFT));
myDetailsSplitter = new OnePixelSplitter(false, "vcs.history.details.splitter.proportion", 0.5f);
myDetailsSplitter.setFirstComponent(scrollPane);
myDetailsSplitter.setSecondComponent(myAdditionalDetails);
setupDetails();
return mySplitter;
}
/** Shows or hides the details splitter; details are always hidden in static/embedded mode. */
private void setupDetails() {
    VcsConfiguration configuration = VcsConfiguration.getInstance(myVcs.getProject());
    boolean showDetails = !myIsStaticAndEmbedded && configuration.SHOW_FILE_HISTORY_DETAILS;
    myDualView.setViewBorder(IdeBorderFactory.createBorder(SideBorder.LEFT));
    mySplitter.setSecondComponent(showDetails ? myDetailsSplitter : null);
}
/** Switches the dual view to tree or flat mode per the persisted user preference. */
private void chooseView() {
    boolean showAsTree = VcsConfiguration.getInstance(myVcs.getProject()).SHOW_FILE_HISTORY_AS_TREE;
    if (showAsTree) {
        myDualView.switchToTheTreeMode();
    }
    else {
        myDualView.switchToTheFlatMode();
    }
}
/**
 * Fills an action group for either the popup menu or the toolbar.
 * Provider-supplied actions without an icon are popup-only; the details toggle
 * is skipped in static/embedded mode, and the tree toggle is toolbar-only.
 *
 * @param popup  true for the context-menu group, false for the toolbar group
 * @param result group to append to; returned for convenience
 */
@NotNull
private DefaultActionGroup fillActionGroup(boolean popup, DefaultActionGroup result) {
if (popup) {
result.add(ActionManager.getInstance().getAction(IdeActions.ACTION_EDIT_SOURCE));
}
AnAction actionGroup = ActionManager.getInstance().getAction(popup ? VCS_HISTORY_POPUP_ACTION_GROUP : VCS_HISTORY_TOOLBAR_ACTION_GROUP);
result.add(actionGroup);
// Additional provider actions trigger a full refresh when they complete.
AnAction[] additionalActions =
myProvider.getAdditionalActions(() -> ApplicationManager.getApplication().invokeAndWait(() -> myRefresherI.refresh(true)));
if (additionalActions != null) {
for (AnAction additionalAction : additionalActions) {
if (popup || additionalAction.getTemplatePresentation().getIcon() != null) {
result.add(additionalAction);
}
}
}
if (!myIsStaticAndEmbedded) {
result.add(new MyShowDetailsAction());
}
if (!popup && myHistorySession.getHistoryAsTreeProvider() != null) {
result.add(new MyShowAsTreeAction());
}
return result;
}
/**
 * DataProvider dispatch: answers platform/VCS data keys from the current
 * selection and panel state. Returns null for keys this panel does not serve.
 */
@Override
public Object getData(@NotNull String dataId) {
if (CommonDataKeys.NAVIGATABLE.is(dataId)) {
// Navigation works only for a single selected revision whose content is available.
VcsFileRevision[] selectedRevisions = getSelectedRevisions();
if (selectedRevisions.length != 1) return null;
VcsFileRevision firstSelectedRevision = ArrayUtil.getFirstElement(selectedRevisions);
if (!myHistorySession.isContentAvailable(firstSelectedRevision)) {
return null;
}
VirtualFile virtualFileForRevision = createVirtualFileForRevision(firstSelectedRevision);
if (virtualFileForRevision != null) {
return new OpenFileDescriptor(myVcs.getProject(), virtualFileForRevision);
}
else {
return null;
}
}
else if (CommonDataKeys.PROJECT.is(dataId)) {
return myVcs.getProject();
}
else if (VcsDataKeys.VCS_FILE_REVISION.is(dataId)) {
return ArrayUtil.getFirstElement(getSelectedRevisions());
}
else if (VcsDataKeys.VCS_NON_LOCAL_HISTORY_SESSION.is(dataId)) {
return !myHistorySession.hasLocalSource();
}
else if (VcsDataKeys.VCS.is(dataId)) {
return myVcs.getKeyInstanceMethod();
}
else if (VcsDataKeys.VCS_FILE_REVISIONS.is(dataId)) {
return getSelectedRevisions();
}
else if (VcsDataKeys.REMOTE_HISTORY_CHANGED_LISTENER.is(dataId)) {
return (Consumer<String>)s -> myDualView.rebuild();
}
else if (VcsDataKeys.CHANGES.is(dataId)) {
return getChanges();
}
else if (VcsDataKeys.VCS_VIRTUAL_FILE.is(dataId)) {
VcsFileRevision[] selectedRevisions = getSelectedRevisions();
if (selectedRevisions.length == 0) return null;
return createVirtualFileForRevision(ArrayUtil.getFirstElement(selectedRevisions));
}
else if (VcsDataKeys.FILE_PATH.is(dataId)) {
return myFilePath;
}
else if (VcsDataKeys.IO_FILE.is(dataId)) {
return myFilePath.getIOFile();
}
else if (CommonDataKeys.VIRTUAL_FILE.is(dataId)) {
VirtualFile virtualFile = myFilePath.getVirtualFile();
return virtualFile == null || !virtualFile.isValid() ? null : virtualFile;
}
else if (VcsDataKeys.FILE_HISTORY_PANEL.is(dataId)) {
return this;
}
else if (VcsDataKeys.HISTORY_SESSION.is(dataId)) {
return myHistorySession;
}
else if (VcsDataKeys.HISTORY_PROVIDER.is(dataId)) {
return myProvider;
}
else if (PlatformDataKeys.COPY_PROVIDER.is(dataId)) {
return this;
}
else if (PREVIOUS_REVISION_FOR_DIFF.is(dataId)) {
// The row after the selected one in the flat view is the "previous" revision;
// the bottom row falls back to myBottomRevisionForShowDiff or NULL.
TableView<TreeNodeOnVcsRevision> flatView = myDualView.getFlatView();
if (flatView.getSelectedRow() == (flatView.getRowCount() - 1)) {
// no previous
return myBottomRevisionForShowDiff != null ? myBottomRevisionForShowDiff : VcsFileRevision.NULL;
}
else {
return flatView.getRow(flatView.getSelectedRow() + 1).getRevision();
}
}
else if (VcsInternalDataKeys.FILE_HISTORY_REFRESHER.is(dataId)) {
return myRefresherI;
}
else if (PlatformDataKeys.HELP_ID.is(dataId)) {
return myHelpId;
}
return null;
}
/**
 * Builds a single Change spanning the selected revisions: oldest selected revision
 * vs. current content (single selection) or vs. the newest selected revision.
 * Returns null when nothing is selected or any revision's content is unavailable.
 */
private Change @Nullable [] getChanges() {
final VcsFileRevision[] revisions = getSelectedRevisions();
if (revisions.length > 0) {
// Sort ascending so revisions[0] is the oldest and the last element the newest.
Arrays.sort(revisions, comparing(VcsRevisionDescription::getRevisionNumber));
for (VcsFileRevision revision : revisions) {
if (!myHistorySession.isContentAvailable(revision)) {
return null;
}
}
final ContentRevision startRevision = new LoadedContentRevision(myFilePath, revisions[0], myVcs.getProject());
final ContentRevision endRevision = (revisions.length == 1) ? new CurrentContentRevision(myFilePath) :
new LoadedContentRevision(myFilePath, revisions[revisions.length - 1], myVcs.getProject());
return new Change[]{new Change(startRevision, endRevision)};
}
return null;
}
/**
 * Returns the (cached) virtual file representing the given revision's content.
 * Directories map to a {@link VcsVirtualFolder}, files to a {@link VcsVirtualFile}.
 * Uses computeIfAbsent instead of the containsKey/put/get triple lookup; the
 * cached value is never null, so behavior is unchanged.
 */
private VirtualFile createVirtualFileForRevision(VcsFileRevision revision) {
    return myRevisionToVirtualFile.computeIfAbsent(revision, rev -> {
        // VcsFileRevisionEx carries its own (possibly renamed) path; fall back to the panel's path.
        FilePath filePath = (rev instanceof VcsFileRevisionEx ? ((VcsFileRevisionEx)rev).getPath() : myFilePath);
        return filePath.isDirectory()
               ? new VcsVirtualFolder(filePath.getPath(), null, VcsFileSystem.getInstance())
               : new VcsVirtualFile(filePath.getPath(), rev, VcsFileSystem.getInstance());
    });
}
/** Unwraps the dual-view selection into the underlying revisions, preserving order. */
public VcsFileRevision @NotNull [] getSelectedRevisions() {
    //noinspection unchecked
    List<TreeNodeOnVcsRevision> selection = (List<TreeNodeOnVcsRevision>)myDualView.getSelection();
    VcsFileRevision[] revisions = new VcsFileRevision[selection.size()];
    int index = 0;
    for (TreeNodeOnVcsRevision node : selection) {
        revisions[index++] = node.getRevision();
    }
    return revisions;
}
/** Disposes the dual view; registered against the project in the constructor. */
@Override
public void dispose() {
myDualView.dispose();
}
/**
 * Rebuilds the revision-number -> list-index map used by the revision column's
 * comparator. Index 0 corresponds to the first entry of the session's list.
 */
private void refreshRevisionsOrder() {
    List<VcsFileRevision> revisions = myHistorySession.getRevisionList();
    myRevisionsOrder.clear();
    for (int index = 0; index < revisions.size(); index++) {
        myRevisionsOrder.put(revisions.get(index).getRevisionNumber(), index);
    }
}
/**
 * Toggles the compact embedded presentation: height-fitting view, recalculated
 * column sizes, and simplified borders on both table headers.
 */
public void setIsStaticAndEmbedded(boolean isStaticAndEmbedded) {
myIsStaticAndEmbedded = isStaticAndEmbedded;
myDualView.setZipByHeight(isStaticAndEmbedded);
myDualView.getFlatView().updateColumnSizes();
if (myIsStaticAndEmbedded) {
myDualView.getFlatView().getTableHeader().setBorder(IdeBorderFactory.createBorder(SideBorder.TOP));
myDualView.getTreeView().getTableHeader().setBorder(IdeBorderFactory.createBorder(SideBorder.TOP));
myDualView.getFlatView().setBorder(null);
myDualView.getTreeView().setBorder(null);
}
}
/** Sets the revision used as the diff base when the bottom row is selected (see getData). */
public void setBottomRevisionForShowDiff(VcsFileRevision bottomRevisionForShowDiff) {
myBottomRevisionForShowDiff = bottomRevisionForShowDiff;
}
/** Panels are equal when they show the same path and starting revision (see sameHistories). */
@Override
public boolean equals(Object obj) {
return obj instanceof FileHistoryPanelImpl && sameHistories((FileHistoryPanelImpl)obj, myFilePath, myStartingRevision);
}
/** Consistent with equals(): hashes the path and the revision's string form. */
@Override
public int hashCode() {
int result = myFilePath.hashCode();
result = 31 * result + (myStartingRevision != null ? myStartingRevision.asString().hashCode() : 0); // NB: asString to conform to equals
return result;
}
/** Copies the presentable text of every selected revision (one per line) to the clipboard. */
@Override
public void performCopy(@NotNull DataContext dataContext) {
String text = StringUtil.join(getSelectedRevisions(), revision -> getPresentableText(revision, true), "\n");
CopyPasteManager.getInstance().setContents(new StringSelection(text));
}
/** Copy is enabled whenever at least one revision row is selected. */
@Override
public boolean isCopyEnabled(@NotNull DataContext dataContext) {
    //noinspection unchecked
    List<TreeNodeOnVcsRevision> selection = (List<TreeNodeOnVcsRevision>)myDualView.getSelection();
    return !selection.isEmpty();
}
/** Copy is always visible for this panel. */
@Override
public boolean isCopyVisible(@NotNull DataContext dataContext) {
return true;
}
/** Re-renders the details pane when the editor color scheme changes. */
@Override
public void globalSchemeChange(EditorColorsScheme scheme) {
updateMessage();
}
/**
 * Adapts a revision-based ColumnInfo to the dual view's tree-node rows and
 * marks the current revision's cell renderer so it paints in bold.
 */
private class TreeNodeColumnInfoWrapper<T extends Comparable<T>> extends FileHistoryColumnWrapper<T> {
TreeNodeColumnInfoWrapper(@NotNull ColumnInfo<VcsFileRevision, T> additionalColumn) {
super(additionalColumn);
}
@Override
protected DualView getDualView() {
return myDualView;
}
@Override
public TableCellRenderer getCustomizedRenderer(TreeNodeOnVcsRevision revision, @Nullable TableCellRenderer renderer) {
// Flag the renderer when this row is the session's current revision (bold text).
if (renderer instanceof BaseHistoryCellRenderer) {
((BaseHistoryCellRenderer)renderer)
.setCurrentRevision(myHistorySession.isCurrentRevision(revision.getRevision().getRevisionNumber()));
}
return renderer;
}
}
/**
 * Base renderer for history columns: renders the current revision's row in bold,
 * all other rows with regular attributes. The flag is set per-cell via
 * {@link #setCurrentRevision(boolean)} before painting.
 */
private abstract static class BaseHistoryCellRenderer extends ColoredTableCellRenderer {
private boolean myIsCurrentRevision = false;
protected SimpleTextAttributes getDefaultAttributes() {
return myIsCurrentRevision ? SimpleTextAttributes.REGULAR_BOLD_ATTRIBUTES : SimpleTextAttributes.REGULAR_ATTRIBUTES;
}
public void setCurrentRevision(boolean currentRevision) {
myIsCurrentRevision = currentRevision;
}
}
/**
 * Column showing the (shortened) revision number. Sorting uses the comparator
 * supplied by the panel (session order), not the number's natural order.
 */
public static class RevisionColumnInfo extends ColumnInfo<VcsFileRevision, VcsRevisionNumber> {
@Nullable private final Comparator<VcsFileRevision> myComparator;
@NotNull private final ColoredTableCellRenderer myRenderer;
public RevisionColumnInfo(@Nullable Comparator<VcsFileRevision> comparator) {
super(VcsBundle.message("column.name.revision.version"));
myComparator = comparator;
myRenderer = new BaseHistoryCellRenderer() {
@Override
protected void customizeCellRenderer(JTable table, Object value, boolean selected, boolean hasFocus, int row, int column) {
setOpaque(selected);
append(VcsUtil.getShortRevisionString((VcsRevisionNumber)value), getDefaultAttributes());
SpeedSearchUtil.applySpeedSearchHighlighting(table, this, false, selected);
}
};
}
@Nullable
@Override
public VcsRevisionNumber valueOf(VcsFileRevision revision) {
return revision.getRevisionNumber();
}
@Nullable
@Override
public Comparator<VcsFileRevision> getComparator() {
return myComparator;
}
@Override
public String getPreferredStringValue() {
// Width hint: ten average-width characters.
return StringUtil.repeatSymbol('m', 10);
}
@Nullable
@Override
public TableCellRenderer getRenderer(VcsFileRevision revision) {
return myRenderer;
}
}
/**
 * Column showing the revision date, sorted chronologically.
 */
public static class DateColumnInfo extends ColumnInfo<VcsFileRevision, Date> {
@NotNull private final ColoredTableCellRenderer myRenderer;
public DateColumnInfo() {
super(VcsBundle.message("column.name.revision.date"));
myRenderer = new BaseHistoryCellRenderer() {
@Override
protected void customizeCellRenderer(JTable table, Object value, boolean selected, boolean hasFocus, int row, int column) {
setOpaque(selected);
Date date = (Date)value;
if (date != null) {
append(DateFormatUtil.formatDateTime(date), getDefaultAttributes());
}
SpeedSearchUtil.applySpeedSearchHighlighting(table, this, false, selected);
}
};
}
@NotNull
@Override
public Comparator<VcsFileRevision> getComparator() {
// NOTE(review): valueOf may return null (renderer guards against it), and
// Comparator.comparing would then NPE during sort — confirm dates are always present here.
return comparing(revision -> valueOf(revision));
}
@Nullable
@Override
public Date valueOf(VcsFileRevision revision) {
return revision.getRevisionDate();
}
@Override
public String getPreferredStringValue() {
// Width hint: a formatted timestamp slightly in the future of "now".
return DateFormatUtil.formatDateTime(Clock.getTime() + 1000);
}
@Nullable
@Override
public TableCellRenderer getRenderer(VcsFileRevision revision) {
return myRenderer;
}
}
/**
 * Renderer for the author column; shows a tooltip with full author/committer
 * details supplied by {@link AuthorColumnInfo#getCustomizedRenderer}.
 */
private static class AuthorCellRenderer extends BaseHistoryCellRenderer {
private String myTooltipText;
/**
 * @noinspection MethodNamesDifferingOnlyByCase
 */
public void setTooltipText(final String text) {
myTooltipText = text;
}
@Override
protected void customizeCellRenderer(JTable table, @Nullable Object value, boolean selected, boolean hasFocus, int row, int column) {
setToolTipText(myTooltipText);
// Paint selection colors explicitly since this renderer sets opaque backgrounds.
if (selected || hasFocus) {
setBackground(table.getSelectionBackground());
setForeground(table.getSelectionForeground());
}
else {
setBackground(table.getBackground());
setForeground(table.getForeground());
}
if (value != null) append(value.toString(), getDefaultAttributes());
SpeedSearchUtil.applySpeedSearchHighlighting(table, this, false, selected);
}
}
/**
 * Column showing the revision author. An author differing from the committer is
 * marked with a trailing "*"; the tooltip carries the full author/committer info.
 */
public static class AuthorColumnInfo extends ColumnInfo<VcsFileRevision, String> {
private final TableCellRenderer AUTHOR_RENDERER = new AuthorCellRenderer();
public AuthorColumnInfo() {
super(VcsBundle.message("column.name.revision.list.author"));
}
@Nullable
@Override
public TableCellRenderer getRenderer(VcsFileRevision revision) {
return AUTHOR_RENDERER;
}
@Override
public TableCellRenderer getCustomizedRenderer(VcsFileRevision revision, TableCellRenderer renderer) {
// Build the tooltip: "author <email>, via committer <email>" as far as data is available.
if (renderer instanceof AuthorCellRenderer) {
if (revision instanceof VcsFileRevisionEx) {
VcsFileRevisionEx ex = (VcsFileRevisionEx)revision;
StringBuilder sb = new StringBuilder(StringUtil.notNullize(ex.getAuthor()));
if (ex.getAuthorEmail() != null) sb.append(" <").append(ex.getAuthorEmail()).append(">");
if (ex.getCommitterName() != null && !Comparing.equal(ex.getAuthor(), ex.getCommitterName())) {
sb.append(", via ").append(ex.getCommitterName());
if (ex.getCommitterEmail() != null) sb.append(" <").append(ex.getCommitterEmail()).append(">");
}
((AuthorCellRenderer)renderer).setTooltipText(sb.toString());
}
}
return renderer;
}
@Nullable
@Override
public String valueOf(VcsFileRevision revision) {
// "*" flags commits where the committer differs from the author.
if (revision instanceof VcsFileRevisionEx) {
if (!Comparing.equal(revision.getAuthor(), ((VcsFileRevisionEx)revision).getCommitterName())) {
return revision.getAuthor() + "*";
}
}
return revision.getAuthor();
}
@Override
@NonNls
public String getPreferredStringValue() {
return StringUtil.repeatSymbol('m', 14);
}
@NotNull
@Override
public Comparator<VcsFileRevision> getComparator() {
return comparing(revision -> valueOf(revision));
}
}
/**
 * Column showing the first line of the commit message, with issue links rendered
 * as clickable hyperlinks.
 */
public static class MessageColumnInfo extends ColumnInfo<VcsFileRevision, String> {
private final ColoredTableCellRenderer myRenderer;
private final IssueLinkRenderer myIssueLinkRenderer;
public MessageColumnInfo(Project project) {
super(getCommitMessageTitle());
myRenderer = new BaseHistoryCellRenderer() {
@Override
protected void customizeCellRenderer(JTable table, Object value, boolean selected, boolean hasFocus, int row, int column) {
setOpaque(selected);
if (value instanceof String) {
String message = (String)value;
myIssueLinkRenderer.appendTextWithLinks(message, getDefaultAttributes());
SpeedSearchUtil.applySpeedSearchHighlighting(table, this, false, selected);
}
}
};
myIssueLinkRenderer = new IssueLinkRenderer(project, myRenderer);
}
/**
 * Returns the commit message's first line (subject), or "" when there is no message.
 */
@NotNull
public static String getSubject(@NotNull VcsFileRevision object) {
final String originalMessage = object.getCommitMessage();
if (originalMessage == null) return "";
int index = StringUtil.indexOfAny(originalMessage, "\n\r");
return index == -1 ? originalMessage : originalMessage.substring(0, index);
}
@Nullable
@Override
public String valueOf(VcsFileRevision revision) {
return getSubject(revision);
}
@Override
public String getPreferredStringValue() {
return StringUtil.repeatSymbol('m', 80);
}
@Nullable
@Override
public TableCellRenderer getRenderer(VcsFileRevision revision) {
return myRenderer;
}
@NotNull
@Override
public Comparator<VcsFileRevision> getComparator() {
return comparing(revision -> valueOf(revision));
}
}
/**
 * A {@link ByteBackedContentRevision} backed by an already-obtained {@link VcsFileRevision};
 * content is loaded from the revision on demand.
 */
private static class LoadedContentRevision implements ByteBackedContentRevision {
  private final FilePath myFile;
  private final VcsFileRevision myRevision;
  private final Project myProject;
  private LoadedContentRevision(final FilePath file, final VcsFileRevision revision, final Project project) {
    myFile = file;
    myRevision = revision;
    myProject = project;
  }
  @Override
  public String getContent() throws VcsException {
    try {
      // Decode the revision bytes, guessing the encoding from the file/project context.
      return VcsHistoryUtil.loadRevisionContentGuessEncoding(myRevision, myFile.getVirtualFile(), myProject);
    }
    catch (IOException e) {
      // NOTE(review): only the localized message survives; the IOException cause is dropped —
      // consider chaining it if VcsException supports a cause.
      throw new VcsException(VcsBundle.message("message.text.cannot.load.revision", e.getLocalizedMessage()));
    }
  }
  @Override
  public byte @Nullable [] getContentAsBytes() throws VcsException {
    try {
      // Raw bytes, no charset interpretation.
      return VcsHistoryUtil.loadRevisionContent(myRevision);
    }
    catch (IOException e) {
      // NOTE(review): cause dropped here as well, same as getContent().
      throw new VcsException(VcsBundle.message("message.text.cannot.load.revision", e.getLocalizedMessage()));
    }
  }
  @Override
  @NotNull
  public FilePath getFile() {
    return myFile;
  }
  @Override
  @NotNull
  public VcsRevisionNumber getRevisionNumber() {
    return myRevision.getRevisionNumber();
  }
}
/**
 * Tree cell renderer that decorates the default renderer's component: the current
 * revision's row is bolded and, when unselected, gets a highlight background.
 */
private static class MyTreeCellRenderer implements TreeCellRenderer {
  private final TreeCellRenderer myDefaultCellRenderer;
  private final Getter<? extends VcsHistorySession> myHistorySession;
  MyTreeCellRenderer(final TreeCellRenderer defaultCellRenderer, final Getter<? extends VcsHistorySession> historySession) {
    myDefaultCellRenderer = defaultCellRenderer;
    myHistorySession = historySession;
  }
  @Override
  public Component getTreeCellRendererComponent(JTree tree,
                                                Object value,
                                                boolean selected,
                                                boolean expanded,
                                                boolean leaf,
                                                int row,
                                                boolean hasFocus) {
    final Component result = myDefaultCellRenderer.getTreeCellRendererComponent(tree, value, selected, expanded, leaf, row, hasFocus);
    final TreePath path = tree.getPathForRow(row);
    if (path == null) return result;
    TreeNodeOnVcsRevision node = row >= 0 ? ((TreeNodeOnVcsRevision)path.getLastPathComponent()) : null;
    if (node != null) {
      // Query the session once per cell instead of twice (was duplicated).
      final boolean isCurrent = myHistorySession.get().isCurrentRevision(node.getRevision().getRevisionNumber());
      if (isCurrent) {
        makeBold(result);
      }
      if (!selected && isCurrent) {
        // Same highlight color in both light and dark themes.
        result.setBackground(new JBColor(new Color(188, 227, 231), new Color(188, 227, 231)));
      }
      ((JComponent)result).setOpaque(false);
    }
    else if (selected) {
      result.setBackground(UIUtil.getTableSelectionBackground(true));
    }
    else {
      result.setBackground(UIUtil.getTableBackground());
    }
    return result;
  }
}
/**
 * Cell decorator that bolds the component when its row represents the
 * session's current revision.
 */
private static class MyCellWrapper implements CellWrapper {
  private final Getter<? extends VcsHistorySession> myHistorySession;
  MyCellWrapper(final Getter<? extends VcsHistorySession> historySession) {
    myHistorySession = historySession;
  }
  @Override
  public void wrap(Component component,
                   JTable table,
                   Object value,
                   boolean isSelected,
                   boolean hasFocus,
                   int row,
                   int column,
                   Object treeNode) {
    final VcsFileRevision rowRevision = ((TreeNodeOnVcsRevision)treeNode).getRevision();
    // Guard clause: nothing to decorate unless this is the current revision.
    if (!myHistorySession.get().isCurrentRevision(rowRevision.getRevisionNumber())) return;
    makeBold(component);
  }
}
/** Toggle action persisting the "show files as tree" preference and switching the view. */
private class MyShowAsTreeAction extends ToggleAction implements DumbAware {
  MyShowAsTreeAction() {
    super(VcsBundle.lazyMessage("action.name.show.files.as.tree"), PlatformIcons.SMALL_VCS_CONFIGURABLE);
  }
  @Override
  public boolean isSelected(@NotNull AnActionEvent e) {
    // State is stored in the project-level VCS configuration.
    return VcsConfiguration.getInstance(myVcs.getProject()).SHOW_FILE_HISTORY_AS_TREE;
  }
  @Override
  public void setSelected(@NotNull AnActionEvent e, boolean state) {
    VcsConfiguration.getInstance(myVcs.getProject()).SHOW_FILE_HISTORY_AS_TREE = state;
    // Rebuild the panel to reflect the new tree/flat mode.
    chooseView();
  }
}
/** Toggle action persisting the "show details" preference and refreshing the details pane. */
private class MyShowDetailsAction extends ToggleAction implements DumbAware {
  MyShowDetailsAction() {
    super(VcsBundle.lazyMessage("action.ToggleAction.text.show.details"),
          VcsBundle.lazyMessage("action.ToggleAction.description.show.details"), AllIcons.Actions.PreviewDetailsVertically);
  }
  @Override
  public boolean isSelected(@NotNull AnActionEvent e) {
    // State is stored in the project-level VCS configuration.
    return VcsConfiguration.getInstance(myVcs.getProject()).SHOW_FILE_HISTORY_DETAILS;
  }
  @Override
  public void setSelected(@NotNull AnActionEvent e, boolean state) {
    VcsConfiguration.getInstance(myVcs.getProject()).SHOW_FILE_HISTORY_DETAILS = state;
    // Re-layout the panel with/without the details component.
    setupDetails();
  }
}
/** Localized column title for the commit-message column. */
private static String getCommitMessageTitle() {
  return VcsBundle.message("label.selected.revision.commit.message");
}
}
| |
/*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.kernel.impl.transaction.command;
import java.io.IOException;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import org.neo4j.graphdb.TGraphNoImplementationException;
import org.neo4j.kernel.impl.core.Token;
import org.neo4j.kernel.impl.index.IndexCommand.AddNodeCommand;
import org.neo4j.kernel.impl.index.IndexCommand.AddRelationshipCommand;
import org.neo4j.kernel.impl.index.IndexCommand.CreateCommand;
import org.neo4j.kernel.impl.index.IndexCommand.DeleteCommand;
import org.neo4j.kernel.impl.index.IndexCommand.RemoveCommand;
import org.neo4j.kernel.impl.index.IndexDefineCommand;
import org.neo4j.kernel.impl.store.CommonAbstractStore;
import org.neo4j.kernel.impl.store.NeoStores;
import org.neo4j.kernel.impl.store.NodeStore;
import org.neo4j.kernel.impl.store.PropertyStore;
import org.neo4j.kernel.impl.store.SchemaStore;
import org.neo4j.kernel.impl.store.TokenStore;
import org.neo4j.kernel.impl.store.record.Abstract64BitRecord;
import org.neo4j.kernel.impl.store.record.DynamicRecord;
import org.neo4j.kernel.impl.store.record.PropertyBlock;
import org.neo4j.kernel.impl.store.record.TokenRecord;
import org.neo4j.kernel.impl.transaction.command.Command.LabelTokenCommand;
import org.neo4j.kernel.impl.transaction.command.Command.NeoStoreCommand;
import org.neo4j.kernel.impl.transaction.command.Command.NodeCommand;
import org.neo4j.kernel.impl.transaction.command.Command.NodeCountsCommand;
import org.neo4j.kernel.impl.transaction.command.Command.PropertyCommand;
import org.neo4j.kernel.impl.transaction.command.Command.PropertyKeyTokenCommand;
import org.neo4j.kernel.impl.transaction.command.Command.RelationshipCommand;
import org.neo4j.kernel.impl.transaction.command.Command.RelationshipCountsCommand;
import org.neo4j.kernel.impl.transaction.command.Command.RelationshipGroupCommand;
import org.neo4j.kernel.impl.transaction.command.Command.RelationshipTypeTokenCommand;
import org.neo4j.kernel.impl.transaction.command.Command.SchemaRuleCommand;
import org.neo4j.kernel.impl.transaction.command.Command.TokenCommand;
/**
 * A {@link CommandHandler} decorator that forwards every command to its delegate while
 * recording, per store, the highest record id it has seen. When {@link #apply()} is called
 * the collected high ids are pushed into the stores via
 * {@code setHighestPossibleIdInUse}, so that id generators do not hand out ids already
 * occupied by recovered records.
 */
public class HighIdTransactionApplier implements CommandHandler
{
    private final CommandHandler delegate;
    private final NeoStores neoStores;
    // Highest id observed per store for this transaction; flushed to the stores in apply().
    private final Map<CommonAbstractStore,HighId> highIds = new HashMap<>();

    public HighIdTransactionApplier( CommandHandler delegate, NeoStores neoStores )
    {
        this.delegate = delegate;
        this.neoStores = neoStores;
    }

    @Override
    public boolean visitNodeTemporalPropertyCommand(Command.NodeTemporalPropertyCommand command) throws IOException {
        // Pure pass-through: no per-store id tracking for temporal properties here.
        return delegate.visitNodeTemporalPropertyCommand( command );
    }

    @Override
    public boolean visitRelationshipTemporalPropertyCommand(Command.RelationshipTemporalPropertyCommand command) throws IOException {
        // Pure pass-through, see above.
        return delegate.visitRelationshipTemporalPropertyCommand( command );
    }

    @Override
    public boolean visitNodeCommand( NodeCommand command ) throws IOException
    {
        NodeStore nodeStore = neoStores.getNodeStore();
        track( nodeStore, command );
        // Dynamic label records live in a separate store and carry their own ids.
        track( nodeStore.getDynamicLabelStore(), command.getAfter().getDynamicLabelRecords() );
        return delegate.visitNodeCommand( command );
    }

    @Override
    public boolean visitRelationshipCommand( RelationshipCommand command ) throws IOException
    {
        track( neoStores.getRelationshipStore(), command );
        return delegate.visitRelationshipCommand( command );
    }

    @Override
    public boolean visitPropertyCommand( PropertyCommand command ) throws IOException
    {
        PropertyStore propertyStore = neoStores.getPropertyStore();
        track( propertyStore, command );
        for ( PropertyBlock block : command.getAfter() )
        {
            switch ( block.getType() )
            {
            case STRING:
                track( propertyStore.getStringStore(), block.getValueRecords() );
                break;
            case ARRAY:
                track( propertyStore.getArrayStore(), block.getValueRecords() );
                break;
            default:
                // Inlined value types carry no dynamic records, nothing extra to track.
                break;
            }
        }
        return delegate.visitPropertyCommand( command );
    }

    @Override
    public boolean visitRelationshipGroupCommand( RelationshipGroupCommand command ) throws IOException
    {
        track( neoStores.getRelationshipGroupStore(), command );
        return delegate.visitRelationshipGroupCommand( command );
    }

    @Override
    public boolean visitRelationshipTypeTokenCommand( RelationshipTypeTokenCommand command ) throws IOException
    {
        trackToken( neoStores.getRelationshipTypeTokenStore(), command );
        return delegate.visitRelationshipTypeTokenCommand( command );
    }

    @Override
    public boolean visitLabelTokenCommand( LabelTokenCommand command ) throws IOException
    {
        trackToken( neoStores.getLabelTokenStore(), command );
        return delegate.visitLabelTokenCommand( command );
    }

    @Override
    public boolean visitPropertyKeyTokenCommand( PropertyKeyTokenCommand command ) throws IOException
    {
        trackToken( neoStores.getPropertyKeyTokenStore(), command );
        return delegate.visitPropertyKeyTokenCommand( command );
    }

    @Override
    public boolean visitSchemaRuleCommand( SchemaRuleCommand command ) throws IOException
    {
        SchemaStore schemaStore = neoStores.getSchemaStore();
        for ( DynamicRecord record : command.getRecordsAfter() )
        {
            track( schemaStore, record.getId() );
        }
        return delegate.visitSchemaRuleCommand( command );
    }

    @Override
    public boolean visitNeoStoreCommand( NeoStoreCommand command ) throws IOException
    {
        // NOTE(review): delegates but unconditionally returns false, unlike the other
        // visit methods which return the delegate's result — presumably intentional; confirm.
        delegate.visitNeoStoreCommand( command );
        return false;
    }

    @Override
    public boolean visitIndexAddNodeCommand( AddNodeCommand command ) throws IOException
    {
        return delegate.visitIndexAddNodeCommand( command );
    }

    @Override
    public boolean visitIndexAddRelationshipCommand( AddRelationshipCommand command ) throws IOException
    {
        return delegate.visitIndexAddRelationshipCommand( command );
    }

    @Override
    public boolean visitIndexRemoveCommand( RemoveCommand command ) throws IOException
    {
        return delegate.visitIndexRemoveCommand( command );
    }

    @Override
    public boolean visitIndexDeleteCommand( DeleteCommand command ) throws IOException
    {
        return delegate.visitIndexDeleteCommand( command );
    }

    @Override
    public boolean visitIndexCreateCommand( CreateCommand command ) throws IOException
    {
        return delegate.visitIndexCreateCommand( command );
    }

    @Override
    public boolean visitIndexDefineCommand( IndexDefineCommand command ) throws IOException
    {
        return delegate.visitIndexDefineCommand( command );
    }

    @Override
    public boolean visitNodeCountsCommand( NodeCountsCommand command ) throws IOException
    {
        return delegate.visitNodeCountsCommand( command );
    }

    @Override
    public boolean visitRelationshipCountsCommand( RelationshipCountsCommand command ) throws IOException
    {
        return delegate.visitRelationshipCountsCommand( command );
    }

    @Override
    public void apply()
    {
        delegate.apply();
        // Notifies the stores about the recovered ids and will bump those high ids atomically if
        // they surpass the current high ids
        for ( Map.Entry<CommonAbstractStore,HighId> highId : highIds.entrySet() )
        {
            highId.getKey().setHighestPossibleIdInUse( highId.getValue().id );
        }
    }

    @Override
    public void close()
    {
        delegate.close();
    }

    /** Records {@code id} as a candidate high id for {@code store}. */
    private void track( CommonAbstractStore store, long id )
    {
        // computeIfAbsent replaces the previous get-then-put dance; the extra track( id )
        // on a freshly created HighId is a no-op since it already holds id.
        highIds.computeIfAbsent( store, s -> new HighId( id ) ).track( id );
    }

    /** Tracks the command's own record id against {@code store}. */
    private void track( CommonAbstractStore store, Command command )
    {
        track( store, command.getKey() );
    }

    /** Tracks the ids of all {@code records} against {@code store}. */
    private void track( CommonAbstractStore store, Collection<? extends Abstract64BitRecord> records )
    {
        for ( Abstract64BitRecord record : records )
        {
            track( store, record.getId() );
        }
    }

    /** Tracks a token command's record id plus the ids of its dynamic name records. */
    private <RECORD extends TokenRecord, TOKEN extends Token>
    void trackToken( TokenStore<RECORD, TOKEN> tokenStore, TokenCommand<RECORD> tokenCommand )
    {
        track( tokenStore, tokenCommand );
        track( tokenStore.getNameStore(), tokenCommand.getRecord().getNameRecords() );
    }

    /** Mutable running maximum of record ids seen for one store. */
    private static class HighId
    {
        private long id;

        HighId( long id )
        {
            this.id = id;
        }

        void track( long id )
        {
            this.id = Math.max( this.id, id );
        }
    }
}
| |
/*
* Copyright (c) 2010-2017 Evolveum
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.evolveum.midpoint.schema;
import com.evolveum.midpoint.prism.PrismContext;
import com.evolveum.midpoint.prism.util.PrismTestUtil;
import com.evolveum.midpoint.schema.constants.MidPointConstants;
import com.evolveum.midpoint.schema.constants.SchemaConstants;
import com.evolveum.midpoint.schema.result.OperationResult;
import com.evolveum.midpoint.schema.result.OperationResultStatus;
import com.evolveum.midpoint.util.PrettyPrinter;
import com.evolveum.midpoint.util.exception.SchemaException;
import com.evolveum.midpoint.xml.ns._public.common.common_3.OperationResultType;
import org.testng.annotations.BeforeSuite;
import org.testng.annotations.Test;
import org.xml.sax.SAXException;
import java.io.IOException;
import java.util.List;
import static org.testng.AssertJUnit.assertEquals;
/**
 * Tests for OperationResult: subresult cleanup, summarization (implicit by hiding and
 * explicit success-summarization), incremental summarization, and round-trip conversion
 * to/from {@link OperationResultType}.
 *
 * @author mederly
 *
 */
public class TestOperationResult {
	@BeforeSuite
	public void setup() throws SchemaException, SAXException, IOException {
		// One-time prism/schema bootstrap for the whole suite.
		PrettyPrinter.setDefaultNamespacePrefix(MidPointConstants.NS_MIDPOINT_PUBLIC_PREFIX);
		PrismTestUtil.resetPrismContext(MidPointPrismContextFactory.FACTORY);
	}
	@Test
	public void testCleanup() throws Exception {
		System.out.println("===[ testCleanup ]===");
		// GIVEN (checks also conversions during result construction)
		OperationResult root = new OperationResult("dummy");
		checkResultConversion(root, true);
		OperationResult sub1 = root.createSubresult("sub1");
		checkResultConversion(root, true);
		OperationResult sub11 = sub1.createMinorSubresult("sub11");
		OperationResult sub12 = sub1.createMinorSubresult("sub12");
		OperationResult sub13 = sub1.createSubresult("sub13");
		OperationResult sub2 = root.createSubresult("sub2");
		sub2.recordFatalError("Fatal");
		checkResultConversion(root, true);
		sub11.recordSuccess();
		sub12.recordWarning("Warning");
		sub13.recordSuccess();
		checkResultConversion(root, true);
		// WHEN
		sub1.computeStatus();
		sub1.cleanupResult();
		root.computeStatus();
		root.cleanupResult();
		// THEN
		System.out.println(root.debugDump());
		assertEquals("Wrong overall status", OperationResultStatus.FATAL_ERROR, root.getStatus()); // because of sub2
		assertEquals("Wrong status of sub1", OperationResultStatus.WARNING, sub1.getStatus()); // because of sub12
		// cleanupResult is expected to drop one of sub1's three subresults, leaving two.
		assertEquals("Wrong # of sub1 subresults", 2, sub1.getSubresults().size());
		checkResultConversion(root, true);
	}
	@Test
	public void testSummarizeByHiding() throws Exception {
		System.out.println("===[ testSummarizeByHiding ]===");
		// GIVEN: 30 identical successful "level2" subresults under level1.
		OperationResult root = new OperationResult("dummy");
		OperationResult level1 = root.createSubresult("level1");
		for (int i = 1; i <= 30; i++) {
			OperationResult level2 = level1.createSubresult("level2");
			level2.addParam("value", i);
			level2.recordSuccess();
		}
		level1.computeStatus();
		root.computeStatus();
		// WHEN+THEN
		// Shallow summarize touches only root's direct children, so level1's stay intact.
		root.summarize();
		System.out.println("After shallow summarizing\n" + root.debugDump());
		assertEquals("Level1 shouldn't be summarized this time", 30, level1.getSubresults().size());
		// Deep summarize keeps 10 records plus one "hidden records" summary entry.
		root.summarize(true);
		System.out.println("After deep summarizing\n" + root.debugDump());
		assertEquals("Level1 should be summarized this time", 11, level1.getSubresults().size());
		OperationResult summary = level1.getSubresults().get(10);
		assertEquals("Wrong operation in summary", "level2", summary.getOperation());
		assertEquals("Wrong status in summary", OperationResultStatus.SUCCESS, summary.getStatus());
		assertEquals("Wrong hidden records count in summary", 20, summary.getHiddenRecordsCount());
		checkResultConversion(root, true);
	}
	@Test
	public void testExplicitSummarization() throws Exception {
		System.out.println("===[ testExplicitSummarization ]===");
		// GIVEN: summarizeSuccesses collapses all identical successes into a single record.
		OperationResult root = new OperationResult("dummy");
		OperationResult level1 = root.createSubresult("level1");
		level1.setSummarizeSuccesses(true);
		for (int i = 1; i <= 30; i++) {
			OperationResult level2 = level1.createSubresult("level2");
			level2.addParam("value", i);
			level2.recordStatus(OperationResultStatus.SUCCESS, "message");
		}
		level1.computeStatus();
		root.computeStatus();
		// WHEN+THEN
		root.summarize();
		System.out.println("After shallow summarizing\n" + root.debugDump());
		assertEquals("Level1 shouldn't be summarized this time", 30, level1.getSubresults().size());
		root.summarize(true);
		System.out.println("After deep summarizing\n" + root.debugDump());
		// All 30 successes collapse into one counted record.
		assertEquals("Level1 should be summarized this time", 1, level1.getSubresults().size());
		OperationResult summary = level1.getSubresults().get(0);
		assertEquals("Wrong operation in summary", "level2", summary.getOperation());
		assertEquals("Wrong status in summary", OperationResultStatus.SUCCESS, summary.getStatus());
		assertEquals("Wrong message in summary", "message", summary.getMessage());
		assertEquals("Wrong count in summary", 30, summary.getCount());
		checkResultConversion(root, false); // summarization settings are not serialized
	}
	@Test
	public void testIncrementalSummarization() throws Exception {
		System.out.println("===[ testIncrementalSummarization ]===");
		// Repeatedly add subresults and re-summarize: A is added every iteration,
		// B on odd iterations only, C has unique operation names so it never summarizes.
		OperationResult root = new OperationResult("dummy");
		int b = 0;
		for (int a = 1; a <= 30; a++) {
			OperationResult operationA = root.createSubresult("operationA");
			operationA.addParam("valueA", a);
			operationA.recordStatus(OperationResultStatus.SUCCESS, "messageA");
			if (a % 2 == 1) {
				OperationResult operationB = root.createSubresult("operationB");
				operationB.addParam("valueB", ++b);
				operationB.recordStatus(OperationResultStatus.WARNING, "messageB");
			}
			OperationResult operationC = root.createSubresult("operationC." + a); // will not be summarized
			operationC.addParam("valueC", a);
			operationC.recordStatus(OperationResultStatus.SUCCESS, "messageC");
			root.summarize();
			System.out.println("After iteration " + a + ":\n" + root.debugDump());
			// Visible A/B records are capped at 10; beyond that one summary record appears each.
			int expectedA = a < 10 ? a : 10;
			int expectedB = b < 10 ? b : 10;
			int expectedC = a;
			int expectedSummarizationA = a <= 10 ? 0 : 1;
			int expectedSummarizationB = b <= 10 ? 0 : 1;
			int expectedTotal = expectedA + expectedB + expectedC + expectedSummarizationA + expectedSummarizationB;
			if (b > 10) {
				assertEquals("Wrong # of subresults", expectedTotal, root.getSubresults().size());
				// The two summary records are appended last, in either A/B order.
				List<OperationResult> lastTwo = root.getSubresults().subList(expectedTotal - 2, expectedTotal);
				OperationResult sumA, sumB;
				if ("operationA".equals(lastTwo.get(0).getOperation())) {
					sumA = lastTwo.get(0);
					sumB = lastTwo.get(1);
				} else {
					sumA = lastTwo.get(1);
					sumB = lastTwo.get(0);
				}
				assertEquals("Wrong operation in summary for A", "operationA", sumA.getOperation());
				assertEquals("Wrong operation in summary for B", "operationB", sumB.getOperation());
				assertEquals("Wrong status in summary for A", OperationResultStatus.SUCCESS, sumA.getStatus());
				assertEquals("Wrong status in summary for B", OperationResultStatus.WARNING, sumB.getStatus());
				assertEquals("Wrong hidden records count in summary for A", a-expectedA, sumA.getHiddenRecordsCount());
				assertEquals("Wrong hidden records count in summary for B", b-expectedB, sumB.getHiddenRecordsCount());
			}
		}
		checkResultConversion(root, true);
	}
	/**
	 * Serializes the result's OperationResultType to XML (for visual inspection), converts it
	 * back, and asserts the bean round-trips losslessly; optionally also asserts the
	 * reconstructed OperationResult equals the original.
	 */
	private void checkResultConversion(OperationResult result, boolean assertEquals) throws SchemaException {
		// WHEN
		OperationResultType resultType = result.createOperationResultType();
		String serialized = PrismTestUtil.getPrismContext().serializerFor(PrismContext.LANG_XML).serializeAnyData(resultType, SchemaConstants.C_RESULT);
		System.out.println("Converted OperationResultType\n" + serialized);
		OperationResult resultRoundTrip = OperationResult.createOperationResult(resultType);
		OperationResultType resultTypeRoundTrip = resultRoundTrip.createOperationResultType();
		// THEN
		assertEquals("Operation result conversion changes the result (OperationResultType)", resultType, resultTypeRoundTrip);
		if (assertEquals) {
			assertEquals("Operation result conversion changes the result (OperationResult)", result, resultRoundTrip);
		}
	}
}
| |
package com.ragstorooks.blacktomove.chess;
import com.ragstorooks.blacktomove.chess.blocks.Board;
import com.ragstorooks.blacktomove.chess.blocks.Colour;
import com.ragstorooks.blacktomove.chess.blocks.Position;
import com.ragstorooks.blacktomove.chess.moves.BasicMove;
import com.ragstorooks.blacktomove.chess.moves.EnPassantableEvent;
import com.ragstorooks.blacktomove.chess.moves.KingsideCastle;
import com.ragstorooks.blacktomove.chess.moves.Promotion;
import com.ragstorooks.blacktomove.chess.pieces.King;
import com.ragstorooks.blacktomove.chess.pieces.Piece;
import com.ragstorooks.blacktomove.chess.pieces.PieceType;
import com.ragstorooks.blacktomove.chess.pieces.Queen;
import com.ragstorooks.blacktomove.chess.pieces.Rook;
import com.ragstorooks.blacktomove.chess.moves.QueensideCastle;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentMatcher;
import org.mockito.Mock;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.runners.MockitoJUnitRunner;
import org.mockito.stubbing.Answer;
import java.util.HashMap;
import java.util.Map;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.argThat;
import static org.mockito.Matchers.eq;
import static org.mockito.Matchers.isA;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@RunWith(MockitoJUnitRunner.class)
public class GameTest {
// Square -> piece lookup shared by both mocked boards; populated in the @Before methods.
private Map<String, Piece> pieces = new HashMap<>();
@Mock
private Piece whitePawn;
@Mock
private Piece whiteRook;
@Mock
private Piece whiteKing;
@Mock
private Piece blackPawn;
@Mock
private Piece blackRook;
@Mock
private Piece blackKing;
// The live board the Game mutates.
@Mock
private Board gameBoard;
// The copy returned by gameBoard.copy().
@Mock
private Board tempBoard;
// System under test, constructed over gameBoard in setupBoards().
private Game game;
@Before
public void setupWhiteRook() {
    // Stub colour and type: that is all Game reads from a piece mock.
    when(whiteRook.getColour()).thenReturn(Colour.White);
    when(whiteRook.getPieceType()).thenReturn(PieceType.ROOK);
}
@Before
public void setupBlackRook() {
    // Stub colour and type for the black rook mock.
    when(blackRook.getColour()).thenReturn(Colour.Black);
    when(blackRook.getPieceType()).thenReturn(PieceType.ROOK);
}
@Before
public void setupWhiteKing() {
    // Stub colour and type for the white king mock.
    when(whiteKing.getColour()).thenReturn(Colour.White);
    when(whiteKing.getPieceType()).thenReturn(PieceType.KING);
}
@Before
public void setupBlackKing() {
    // Stub colour and type for the black king mock.
    when(blackKing.getColour()).thenReturn(Colour.Black);
    when(blackKing.getPieceType()).thenReturn(PieceType.KING);
}
@Before
public void setupWhitePawn() {
    // Stub colour and type for the white pawn mock.
    when(whitePawn.getColour()).thenReturn(Colour.White);
    when(whitePawn.getPieceType()).thenReturn(PieceType.PAWN);
}
@Before
public void setupBlackPawn() {
    // Stub colour and type for the black pawn mock.
    when(blackPawn.getColour()).thenReturn(Colour.Black);
    when(blackPawn.getPieceType()).thenReturn(PieceType.PAWN);
}
@Before
public void setupPieces() {
    // Castling-ready layout: rooks on the corner files, kings on e1/e8, one pawn each
    // on e2/e7. Note the SAME rook mock instance occupies both corners per side.
    pieces.put("a1", whiteRook);
    pieces.put("e1", whiteKing);
    pieces.put("h1", whiteRook);
    pieces.put("e2", whitePawn);
    pieces.put("a8", blackRook);
    pieces.put("e8", blackKing);
    pieces.put("h8", blackRook);
    pieces.put("e7", blackPawn);
}
@Before
public void setupBoards() {
    // gameBoard.copy() yields tempBoard — presumably Game uses the copy to try a move
    // for check detection before committing it; TODO confirm against Game.
    when(gameBoard.copy()).thenReturn(tempBoard);
    setupBoard(gameBoard);
    setupBoard(tempBoard);
    game = new Game(gameBoard);
}
/**
 * Wires a board mock so that all piece lookups (by square, by colour, and by
 * colour + type) are answered live from the shared {@code pieces} map.
 */
private void setupBoard(Board board) {
    // Single-square lookup delegates straight to the map.
    doAnswer(invocation -> pieces.get((String) invocation.getArguments()[0]))
            .when(board).get(isA(String.class));

    // All pieces of a given colour.
    doAnswer((Answer<Map<String, Piece>>) invocation -> {
        Colour wantedColour = (Colour) invocation.getArguments()[0];
        Map<String, Piece> matches = new HashMap<>();
        pieces.entrySet().stream()
                .filter(entry -> wantedColour.equals(entry.getValue().getColour()))
                .forEach(entry -> matches.put(entry.getKey(), entry.getValue()));
        return matches;
    }).when(board).getPiecesOfColour(isA(Colour.class));

    // All pieces matching both colour and piece type.
    doAnswer((Answer<Map<String, Piece>>) invocation -> {
        Colour wantedColour = (Colour) invocation.getArguments()[0];
        PieceType wantedType = (PieceType) invocation.getArguments()[1];
        Map<String, Piece> matches = new HashMap<>();
        pieces.entrySet().stream()
                .filter(entry -> wantedColour.equals(entry.getValue().getColour())
                        && wantedType.equals(entry.getValue().getPieceType()))
                .forEach(entry -> matches.put(entry.getKey(), entry.getValue()));
        return matches;
    }).when(board).getPiecesOfType(isA(Colour.class), isA(PieceType.class));
}
/** Metadata added via the fluent addMeta chain must be retrievable from getMetadata. */
@Test
public void testThatGameMetadataIsStored() {
    // act
    game.addMeta("key1", "value1").addMeta("key2", "value2");
    // verify
    Map<String, String> metadata = game.getMetadata();
    assertThat(metadata.size(), equalTo(2));
    assertThat(metadata.get("key1"), equalTo("value1"));
    assertThat(metadata.get("key2"), equalTo("value2"));
}
/** getCurrentBoardPosition should expose the board's string representation. */
@Test
public void testThatCurrentBoardPositionIsReturned() {
    // setup
    when(gameBoard.toString()).thenReturn("test board");
    // act
    String boardPosition = game.getCurrentBoardPosition();
    // verify
    assertThat(boardPosition, equalTo("test board"));
}
/** A legal pawn push (e2-e4) clears the origin square and places the pawn on the target. */
@Test
public void testThatBasicMoveIsMadeIfNoCheck() {
    // setup
    when(whitePawn.canMoveTo(eq("e2"), eq("e4"), eq(false), isA(Position.class))).thenReturn(true);
    // act
    game.makeMove(new BasicMove(Colour.White, PieceType.PAWN, "e4", false, null));
    // assert
    verify(gameBoard).put("e2", null);
    verify(gameBoard).put("e4", whitePawn);
}
/** A move that would leave the own king in check (rook eyeing e1) must be rejected. */
@Test(expected = IllegalArgumentException.class)
public void testThatBasicMoveIsNotMadeIfItResultsInCheck() {
    // setup
    when(blackRook.canMoveTo(eq("h8"), eq("e1"), eq(true), isA(Position.class))).thenReturn(true);
    when(whitePawn.canMoveTo(eq("e2"), eq("e4"), eq(false), isA(Position.class))).thenReturn(true);
    // act
    game.makeMove(new BasicMove(Colour.White, PieceType.PAWN, "e4", false, null));
}
/** Promoting on e8 must replace the pawn with a new white Queen on the target square. */
@Test
public void testThatPromotionWorks() {
    // setup
    when(whitePawn.canMoveTo(eq("e2"), eq("e8"), eq(false), isA(Position.class))).thenReturn(true);
    // act
    game.makeMove(new Promotion(Colour.White, PieceType.QUEEN, "e8", false, null));
    // assert
    verify(gameBoard).put("e2", null);
    verify(gameBoard).put(eq("e8"), argThat(new ArgumentMatcher<Piece>() {
        @Override
        public boolean matches(Object argument) {
            // A freshly constructed Queen, so match on type + colour rather than identity.
            return (argument instanceof Queen) && ((Queen) argument).getColour().equals(Colour.White);
        }
    }));
}
/** O-O: king e1->g1 and rook h1->f1, origin squares cleared. */
@Test
public void testKingsideCastleIfValid() {
    // act
    game.makeMove(new KingsideCastle(Colour.White));
    // assert
    verify(gameBoard).put("e1", null);
    verify(gameBoard).put("h1", null);
    verify(gameBoard).put(eq("g1"), argThat(new ArgumentMatcher<Piece>() {
        @Override
        public boolean matches(Object argument) {
            // New King instance — match on type + colour.
            return (argument instanceof King) && ((King) argument).getColour().equals(Colour.White);
        }
    }));
    verify(gameBoard).put(eq("f1"), argThat(new ArgumentMatcher<Piece>() {
        @Override
        public boolean matches(Object argument) {
            return (argument instanceof Rook) && ((Rook) argument).getColour().equals(Colour.White);
        }
    }));
}
/** O-O is illegal while the king (on e1) is in check. */
@Test(expected = IllegalArgumentException.class)
public void testKingsideCastleFailsIfBlackRookCanMoveToe1() {
    // setup
    when(blackRook.canMoveTo(eq("h8"), eq("e1"), eq(true), isA(Position.class))).thenReturn(true);
    // act
    game.makeMove(new KingsideCastle(Colour.White));
}
/**
 * Re-stubs tempBoard's colour+type lookup so the white king appears on {@code square}
 * (simulating the king mid-castle); every other query falls back to the shared map.
 */
private void setupTempBoardToReturnKingOnSquare(String square) {
    doAnswer((Answer<Map<String, Piece>>) invocation -> {
        Colour wantedColour = (Colour) invocation.getArguments()[0];
        PieceType wantedType = (PieceType) invocation.getArguments()[1];
        Map<String, Piece> matches = new HashMap<>();
        // Special case: report the white king on the overridden square.
        if (Colour.White.equals(wantedColour) && PieceType.KING.equals(wantedType)) {
            matches.put(square, whiteKing);
            return matches;
        }
        // Fallback: answer from the shared piece map, as setupBoard does.
        pieces.entrySet().stream()
                .filter(entry -> wantedColour.equals(entry.getValue().getColour())
                        && wantedType.equals(entry.getValue().getPieceType()))
                .forEach(entry -> matches.put(entry.getKey(), entry.getValue()));
        return matches;
    }).when(tempBoard).getPiecesOfType(isA(Colour.class), isA(PieceType.class));
}
/** O-O is illegal if the king would pass through an attacked square (f1). */
@Test(expected = IllegalArgumentException.class)
public void testKingsideCastleFailsIfBlackRookCanMoveTof1() {
    // setup
    setupTempBoardToReturnKingOnSquare("f1");
    when(blackRook.canMoveTo(eq("h8"), eq("f1"), eq(true), isA(Position.class))).thenReturn(true);
    // act
    game.makeMove(new KingsideCastle(Colour.White));
}
/** O-O is illegal if the king would land on an attacked square (g1). */
@Test(expected = IllegalArgumentException.class)
public void testKingsideCastleFailsIfBlackRookCanMoveTog1() {
    // setup
    setupTempBoardToReturnKingOnSquare("g1");
    when(blackRook.canMoveTo(eq("h8"), eq("g1"), eq(true), isA(Position.class))).thenReturn(true);
    // act
    game.makeMove(new KingsideCastle(Colour.White));
}
/** An attack on h1 (the rook's square, which the king never crosses) must not block O-O. */
@Test
public void testKingsideCastleIsValidEvenIfBlackRookAttacksh1() {
    // setup
    when(blackRook.canMoveTo(eq("h8"), eq("h1"), eq(true), isA(Position.class))).thenReturn(true);
    // act
    game.makeMove(new KingsideCastle(Colour.White));
    // assert
    verify(gameBoard).put("e1", null);
    verify(gameBoard).put("h1", null);
    verify(gameBoard).put(eq("g1"), argThat(new ArgumentMatcher<Piece>() {
        @Override
        public boolean matches(Object argument) {
            return (argument instanceof King) && ((King) argument).getColour().equals(Colour.White);
        }
    }));
    verify(gameBoard).put(eq("f1"), argThat(new ArgumentMatcher<Piece>() {
        @Override
        public boolean matches(Object argument) {
            return (argument instanceof Rook) && ((Rook) argument).getColour().equals(Colour.White);
        }
    }));
}
/** O-O is illegal when f1 is occupied. */
@Test(expected = IllegalArgumentException.class)
public void testKingsideCastleFailsIfPieceExistsOnf1() {
    // setup
    pieces.put("f1", whiteRook);
    // act
    game.makeMove(new KingsideCastle(Colour.White));
}
/** O-O is illegal when g1 is occupied. */
@Test(expected = IllegalArgumentException.class)
public void testKingsideCastleFailsIfPieceExistsOng1() {
    // setup
    pieces.put("g1", whiteRook);
    // act
    game.makeMove(new KingsideCastle(Colour.White));
}
/** O-O-O: king e1->c1 and rook a1->d1, origin squares cleared. */
@Test
public void testQueensideCastleIfValid() {
    // act
    game.makeMove(new QueensideCastle(Colour.White));
    // assert
    verify(gameBoard).put("e1", null);
    verify(gameBoard).put("a1", null);
    verify(gameBoard).put(eq("c1"), argThat(new ArgumentMatcher<Piece>() {
        @Override
        public boolean matches(Object argument) {
            // New King instance — match on type + colour.
            return (argument instanceof King) && ((King) argument).getColour().equals(Colour.White);
        }
    }));
    verify(gameBoard).put(eq("d1"), argThat(new ArgumentMatcher<Piece>() {
        @Override
        public boolean matches(Object argument) {
            return (argument instanceof Rook) && ((Rook) argument).getColour().equals(Colour.White);
        }
    }));
}
/** O-O-O is illegal while the king (on e1) is in check. */
@Test(expected = IllegalArgumentException.class)
public void testQueensideCastleFailsIfBlackRookCanMoveToe1() {
    // setup
    when(blackRook.canMoveTo(eq("h8"), eq("e1"), eq(true), isA(Position.class))).thenReturn(true);
    // act
    game.makeMove(new QueensideCastle(Colour.White));
}
// The king may not castle through an attacked square (d1 is on its path).
@Test(expected = IllegalArgumentException.class)
public void testQueensideCastleFailsIfBlackRookCanMoveTod1() {
// setup
// Place the king on d1 in the temporary board so the attack check sees it there.
setupTempBoardToReturnKingOnSquare("d1");
when(blackRook.canMoveTo(eq("h8"), eq("d1"), eq(true), isA(Position.class))).thenReturn(true);
// act
game.makeMove(new QueensideCastle(Colour.White));
}
// The king may not castle into check: c1 is its destination square.
@Test(expected = IllegalArgumentException.class)
public void testQueensideCastleFailsIfBlackRookCanMoveToc1() {
// setup
setupTempBoardToReturnKingOnSquare("c1");
when(blackRook.canMoveTo(eq("h8"), eq("c1"), eq(true), isA(Position.class))).thenReturn(true);
// act
game.makeMove(new QueensideCastle(Colour.White));
}
// Queenside castling must still be allowed when only b1 is attacked: b1 must be
// empty, but it is not on the king's path (e1-d1-c1), so an attack on it is harmless.
// FIX: this method was missing the @Test annotation, so JUnit silently never ran it
// (compare the sibling ...Attacksa1 test below).
@Test
public void testQueensideCastleIsValidEvenIfBlackRookAttacksb1() {
// setup
when(blackRook.canMoveTo(eq("h8"), eq("b1"), eq(true), isA(Position.class))).thenReturn(true);
// act
game.makeMove(new QueensideCastle(Colour.White));
// assert
verify(gameBoard).put("e1", null);
verify(gameBoard).put("a1", null);
verify(gameBoard).put(eq("c1"), argThat(new ArgumentMatcher<Piece>() {
@Override
public boolean matches(Object argument) {
return (argument instanceof King) && ((King) argument).getColour().equals(Colour.White);
}
}));
verify(gameBoard).put(eq("d1"), argThat(new ArgumentMatcher<Piece>() {
@Override
public boolean matches(Object argument) {
return (argument instanceof Rook) && ((Rook) argument).getColour().equals(Colour.White);
}
}));
}
// Castling must still be allowed when only a1 (the rook's own square,
// not on the king's path) is attacked.
@Test
public void testQueensideCastleIsValidEvenIfBlackRookAttacksa1() {
// setup
when(blackRook.canMoveTo(eq("h8"), eq("a1"), eq(true), isA(Position.class))).thenReturn(true);
// act
game.makeMove(new QueensideCastle(Colour.White));
// assert
verify(gameBoard).put("e1", null);
verify(gameBoard).put("a1", null);
verify(gameBoard).put(eq("c1"), argThat(new ArgumentMatcher<Piece>() {
@Override
public boolean matches(Object argument) {
return (argument instanceof King) && ((King) argument).getColour().equals(Colour.White);
}
}));
verify(gameBoard).put(eq("d1"), argThat(new ArgumentMatcher<Piece>() {
@Override
public boolean matches(Object argument) {
return (argument instanceof Rook) && ((Rook) argument).getColour().equals(Colour.White);
}
}));
}
// Castling is blocked when a piece occupies d1, the rook's destination square.
@Test(expected = IllegalArgumentException.class)
public void testQueensideCastleFailsIfPieceExistsOnd1() {
// setup
pieces.put("d1", whiteRook);
// act
game.makeMove(new QueensideCastle(Colour.White));
}
// Castling is blocked when a piece occupies c1, the king's destination square.
@Test(expected = IllegalArgumentException.class)
public void testQueensideCastleFailsIfPieceExistsOnc1() {
// setup
pieces.put("c1", whiteRook);
// act
game.makeMove(new QueensideCastle(Colour.White));
}
// Castling is blocked when any square between king and rook is occupied, even b1
// which the king never crosses.
@Test(expected = IllegalArgumentException.class)
public void testQueensideCastleFailsIfPieceExistsOnb1() {
// setup
pieces.put("b1", whiteRook);
// act
game.makeMove(new QueensideCastle(Colour.White));
}
// Once the king has moved, kingside castling rights are permanently lost.
@Test(expected = IllegalArgumentException.class)
public void testThatKingsideCastleShouldFailIfWhiteKingHasAlreadyMoved() {
// setup
try {
// Move the king once (e1 -> f1) so that the subsequent castle attempt is illegal.
when(whiteKing.canMoveTo(eq("e1"), eq("f1"), eq(false), isA(Position.class))).thenReturn(true);
game.makeMove(new BasicMove(Colour.White, PieceType.KING, "f1", false, null));
} catch(Exception e) {
// Only the act phase may throw; a setup failure would make the test pass vacuously.
fail("No exceptions to be caught in the setup portion of the method");
}
// act
game.makeMove(new KingsideCastle(Colour.White));
}
// Once the kingside rook has moved, kingside castling rights are permanently lost.
@Test(expected = IllegalArgumentException.class)
public void testThatKingsideCastleShouldFailIfWhiteKingsideRookHasAlreadyMoved() {
// setup
try {
// Move the h1 rook once so that the subsequent castle attempt is illegal.
when(whiteRook.canMoveTo(eq("h1"), eq("f1"), eq(false), isA(Position.class))).thenReturn(true);
game.makeMove(new BasicMove(Colour.White, PieceType.ROOK, "f1", false, null));
} catch(Exception e) {
fail("No exceptions to be caught in the setup portion of the method");
}
// act
game.makeMove(new KingsideCastle(Colour.White));
}
// Once the king has moved, queenside castling rights are permanently lost.
@Test(expected = IllegalArgumentException.class)
public void testThatQueensideCastleShouldFailIfWhiteKingHasAlreadyMoved() {
// setup
try {
when(whiteKing.canMoveTo(eq("e1"), eq("f1"), eq(false), isA(Position.class))).thenReturn(true);
game.makeMove(new BasicMove(Colour.White, PieceType.KING, "f1", false, null));
} catch(Exception e) {
fail("No exceptions to be caught in the setup portion of the method");
}
// act
game.makeMove(new QueensideCastle(Colour.White));
}
// Once the queenside (a1) rook has moved, queenside castling rights are permanently lost.
@Test(expected = IllegalArgumentException.class)
public void testThatQueensideCastleShouldFailIfWhiteQueensideRookHasAlreadyMoved() {
// setup
try {
when(whiteRook.canMoveTo(eq("a1"), eq("f1"), eq(false), isA(Position.class))).thenReturn(true);
game.makeMove(new BasicMove(Colour.White, PieceType.ROOK, "f1", false, null));
} catch(Exception e) {
fail("No exceptions to be caught in the setup portion of the method");
}
// act
game.makeMove(new QueensideCastle(Colour.White));
}
// An en-passant-shaped pawn capture must NOT remove the passed pawn when no
// en-passantable event was raised for the target square.
@Test
public void shouldNotAllowEnPassantLikeCaptureIfNotEnPassantable() {
// setup
when(blackPawn.canMoveTo(eq("e7"), eq("f3"), eq(true), isA(Position.class))).thenReturn(true);
// A null event clears/sets no en-passantable square.
game.notify(null);
// act
game.makeMove(new BasicMove(Colour.Black, PieceType.PAWN, "f3", true, "e"));
// verify
// f4 (the square the captured pawn would sit on) is never cleared.
verify(gameBoard, never()).put("f4", null);
}
// When an EnPassantableEvent marked f3, a black pawn capturing onto f3 also
// removes the white pawn from f4 (the en-passant capture).
@Test
public void shouldAllowEnPassantLikeCaptureIfEnPassantable() {
// setup
when(blackPawn.canMoveTo(eq("e7"), eq("f3"), eq(true), isA(Position.class))).thenReturn(true);
game.notify(new EnPassantableEvent("f3"));
// act
game.makeMove(new BasicMove(Colour.Black, PieceType.PAWN, "f3", true, "e"));
// verify
verify(gameBoard).put("f4", null);
}
}
| |
package hybridstats;
import java.io.PrintWriter;
import java.util.*;
import java.util.Map.Entry;
import mdwUtils.DoubleList;
import pal.tree.*;
import pal.misc.*;
import palExtensions.ExtRandom;
import palExtensions.IdGroupUtils;
import palExtensions.NeoSplitSystem;
import palExtensions.NeoSplitUtils;
import palExtensions.Split;
/**
 * Accumulates occurrence counts of splits (taxon bipartitions) gathered from trees
 * or split systems over a single shared taxon set, and derives consensus statistics
 * from them: majority-rule and greedy consensus trees, summed Robinson-Foulds
 * distance, pairwise split incompatibility, internode certainty (IC / ICA, after
 * Salichos, Stamatakis and Rokas, MBE 2014) and tree certainty.
 * Implements IdGroup by delegating to the common taxon set.
 *
 * @author woodhams
 */
/*
 * TODO: Consider making this class Iterable
 */
public class SplitCounts implements IdGroup {
	private static final long serialVersionUID = -6924983916412736269L;
	private static final ExtRandom DEFAULT_RNG = new ExtRandom(4); // for shuffling for resolving ties in greedy consensus tree
	/*
	 * I spent considerable time trying to make HashMap<Split,Integer> work, but splits with the same
	 * hashCode() still got entered separately in the HashMap, so use two maps keyed by hex string instead
	 * TODO: fix this. This workaround is ugly.
	 */
	private HashMap<String,Integer> counts; // split hex string -> occurrence count
	private HashMap<String,Split> splits;   // split hex string -> the Split object itself
	private Vector<String> sortedSplits; // Splits sorted by frequency, ties randomly resolved. Cached result: is null until first needed.
	private Vector<Split> greedySplits; // Derived from sortedSplits, the splits in the greedy consensus tree. Is null until first needed.
	private Vector<Integer> greedySplitIndex; // where in sortedSplits the greedySplits occur.
	private IdGroup idg; // all splits must have the same IdGroup, to ensure consistent ordering of taxa.
	private int nTrees; // when adding splits a tree at a time, how many trees were added?
	private boolean splitsAddedOnlyViaTrees; // true while no splits arrive via addSplitSystem(); precondition of the consensus methods
	private int nSplits; // total number of splits. Equal to the sum of values in 'counts'.
	private ExtRandom shuffler = DEFAULT_RNG;
	/*
	 * I see danger here: NeoSplitSystem is an IdGroup, for which we also have a constructor.
	 * NOTE(review): this constructor bypasses addSplitSystem() and so leaves
	 * splitsAddedOnlyViaTrees == true while nTrees == 0; majorityRuleConsensusTree()
	 * on such an instance would treat every split as majority — confirm intended.
	 */
	public SplitCounts(NeoSplitSystem system) {
		this();
		addSplitSystem__(system);
	}
	public SplitCounts(Forest forest) {
		this();
		this.addForest(forest);
	}
	public SplitCounts(IdGroup idGroup) {
		this();
		setIdGroup(idGroup);
	}
	/*
	 * If this constructor is used, 'id' will be set by first call to addSplitSystem.
	 */
	public SplitCounts() {
		counts = new HashMap<String,Integer>();
		splits = new HashMap<String,Split>();
		idg = null;
		nTrees = 0;
		nSplits =0;
		splitsAddedOnlyViaTrees = true;
		sortedSplits = null;
		greedySplits = null;
		greedySplitIndex = null;
	}
	/** @return the total number of split occurrences added (with multiplicity). */
	public int totalNumberSplits() {
		return nSplits;
	}
	/*
	 * Sets idg if not already set.
	 * Throws error if try to set a non-equivalent idg.
	 */
	private void setIdGroup(IdGroup idGroup) {
		if (idg == null) {
			idg = idGroup;
		} else if (!IdGroupUtils.equals(idg,idGroup)) {
			// FIX: was an unconditional second 'if', which pointlessly re-compared
			// the group to itself immediately after assigning it.
			throw new IllegalArgumentException("Tried to add split on different taxon set");
		}
	}
	/** Adds the splits of every tree in the forest, tree by tree. */
	public void addForest(Forest forest) {
		for (Tree tree : forest) {
			this.addTree(tree);
		}
	}
	/**
	 * Using this method invalidates majorityRuleConsensusTree() method
	 * @param splitSys
	 */
	public void addSplitSystem(NeoSplitSystem splitSys) {
		splitsAddedOnlyViaTrees=false;
		addSplitSystem__(splitSys);
	}
	// Core accumulation: counts each split by its hex-string key (see field comment
	// for why keys are strings rather than Splits).
	private void addSplitSystem__(NeoSplitSystem splitSys) {
		setIdGroup(splitSys.getIdGroup());
		for (Split split : splitSys) {
			nSplits++;
			String hex = split.toHexString();
			if (counts.containsKey(hex)) {
				counts.put(hex, counts.get(hex)+1);
			} else {
				counts.put(hex, 1);
				splits.put(hex,split);
			}
		}
	}
	/** Adds all splits of one tree and bumps the tree counter. */
	public void addTree(Tree tree) {
		nTrees++;
		NeoSplitSystem splitSys = NeoSplitUtils.getSplits(tree);
		addSplitSystem__(splitSys);
	}
	/*
	 * If you want your own random number generator to control the shuffling (e.g. so you can set the seed).
	 */
	public void setRNG(ExtRandom rng) {
		this.shuffler = rng;
	}
	/**
	 * @return the strict-majority-rule consensus tree of the added trees.
	 * @throws RuntimeException if any splits were added other than via trees.
	 */
	public Tree majorityRuleConsensusTree() {
		// Somebody used 'addSplitSystem()' method to supply splits to this count.
		if (!splitsAddedOnlyViaTrees) throw new RuntimeException("Can't determine consensus tree unless splits added only via trees");
		NeoSplitSystem consensus = new NeoSplitSystem(idg);
		int majority = nTrees/2+1; // strictly more than half the trees
		for (String hex : splits.keySet()) {
			if (counts.get(hex)>=majority) {
				consensus.add(splits.get(hex));
			}
		}
		return NeoSplitUtils.treeFromSplits(consensus);
	}
	/**
	 * Returns the sum of the Robinson Foulds distances between the
	 * majority rules consensus tree and each tree in the collection.
	 * This does not actually require calculating the majority rule tree.
	 * @return
	 */
	public int sumRFtoMajRuleTree() {
		// Somebody used 'addSplitSystem()' method to supply splits to this count.
		if (!splitsAddedOnlyViaTrees) throw new RuntimeException("Can't determine consensus tree unless splits added only via trees");
		int sumDist = 0;
		// A split in 'count' trees is absent from the consensus iff count <= nTrees/2;
		// either way it contributes min(count, nTrees-count) mismatches.
		for (Integer count : counts.values()) {
			sumDist += Math.min(count, nTrees-count);
		}
		return sumDist;
	}
	/**
	 * For each pair of splits:
	 * If one or both occur <threshold> or fewer times, count 0.
	 * If they are compatible, count 0.
	 * Else reduce the count of each by <threshold>, multiply these counts, add to the sum.
	 * @param threshold
	 * @return
	 */
	public int weightedPairwiseSplitIncompatibility (int threshold) {
		int n = splits.size(); // 'counts' and 'splits' always hold the same keys
		int sum = 0;
		String[] keySet = new String[n];
		counts.keySet().toArray(keySet);
		for (int i=0; i<n-1; i++) {
			int count1 = counts.get(keySet[i]);
			if (count1<=threshold) continue;
			count1 -= threshold;
			for (int j=i+1; j<n; j++) {
				int count2 = counts.get(keySet[j]);
				if (count2>threshold && !splits.get(keySet[i]).compatible(splits.get(keySet[j]))) {
					sum += count1*(count2-threshold);
				}
			}
		}
		return sum;
	}
	/**
	 * Return the total number of incompatible split pairs in the split collection
	 * @return
	 */
	public int weightedPairwiseSplitIncompatibility() {
		return weightedPairwiseSplitIncompatibility(0);
	}
	/** Prints one line per greedy-consensus split with its internode certainty. */
	public void printInternodeCertainties(PrintWriter out) {
		DoubleList<Split,Double> ic = getICs();
		for (int i=0; i<ic.size(); i++) {
			out.printf("IC = %f for split %s\n", ic.getB(i), ic.getA(i).toString());
		}
	}
	/**
	 * Returns the sum of Internode Certainties over a greedy consensus tree.
	 * @return
	 */
	public double treeCertainty() {
		double tc=0;
		DoubleList<Split,Double> ic = getICs();
		for (int i=0; i<ic.size(); i++) {
			tc += ic.getB(i);
		}
		return tc;
	}
	/**
	 * Returns the sum of Internode Certainty All over a greedy consensus tree.
	 * @return
	 */
	public double treeCertaintyAll(int threshold) {
		double tca=0;
		DoubleList<Split,Double> ic = getICAs(threshold);
		for (int i=0; i<ic.size(); i++) {
			tca += ic.getB(i);
		}
		return tca;
	}
	/* The comparator to use whenever we sort splits in this SplitCounts object.
	 * Sorts by descending frequency. */
	private final Comparator<String> frequencyComparator = new Comparator<String>() {
		public int compare(String s1, String s2) {
			// FIX: was 'counts.get(s2)-counts.get(s1)' — int subtraction can overflow;
			// Integer.compare is the overflow-safe idiom.
			return Integer.compare(counts.get(s2), counts.get(s1));
		}
	};
	/*
	 * Will redo the sort-by-frequency even if it has already been done.
	 * Clears greedySplits.
	 */
	private void resortSplits() {
		sortedSplits = new Vector<String>(counts.size()); // to be sorted by frequency
		for (String splitStr : counts.keySet()) {
			sortedSplits.add(splitStr);
		}
		// Shuffle first so that equal-frequency splits end up in random order
		// after the (stable) sort — this is how ties are broken randomly.
		shuffler.shuffle(sortedSplits);
		Collections.sort(sortedSplits,frequencyComparator);
		greedySplits = null;
		greedySplitIndex = null;
	}
	/*
	 * Ensure sortedSplits is populated. Don't recalculate if it is.
	 */
	private void sortSplits() {
		if (sortedSplits==null) resortSplits();
	}
	/*
	 * Does not recalculate if greedySplits are already cached.
	 * Greedily accepts splits in descending frequency order, keeping each split
	 * that is compatible with everything accepted so far, until the tree is
	 * fully resolved (nTaxa-3 internal splits).
	 */
	private void findGreedySplits() {
		if (greedySplits != null) return;
		sortSplits();
		int nGreedySplits = idg.getIdCount()-3;
		greedySplits = new Vector<Split>(nGreedySplits);
		greedySplitIndex = new Vector<Integer>(nGreedySplits);
		for (int i=0; i<sortedSplits.size() && greedySplits.size() < nGreedySplits; i++) {
			String splitStr = sortedSplits.elementAt(i);
			Split split = splits.get(splitStr);
			if (split.compatible(greedySplits)) {
				greedySplits.add(split);
				greedySplitIndex.add(i);
			}
		}
	}
	/*
	 * If recalculate = true, will recalculate greedy consensus tree with
	 * different random tie breaks, even if a greedy consensus tree is already cached.
	 * If recalculate = false, will recalculate only if no tree is already cached.
	 */
	public Tree greedyConsensusTree(boolean recalculate) {
		if (recalculate) resortSplits();
		findGreedySplits();
		return NeoSplitUtils.treeFromSplits(greedySplits);
	}
	/*
	 * Return a twin list of the greedy consensus tree splits and their internode certainties
	 */
	public DoubleList<Split,Double> getICs() {
		DoubleList<Split,Double> ic = new DoubleList<Split,Double>();
		// length threshold 2 = classic IC: main split plus its strongest competitor.
		List<DoubleList<Split,Integer>> greedyTreeConflicts = findConflictingSplitCounts(2, true);
		for (DoubleList<Split,Integer> splitList : greedyTreeConflicts) {
			ic.add(splitList.getA(0), internodeCertainty(splitList));
		}
		return ic;
	}
	/*
	 * Return a twin list of the greedy consensus tree splits and their ICA (internode certainty all)
	 * scores, with a threshold for which incompatible splits are included in the ICA calculation
	 */
	public DoubleList<Split,Double> getICAs(int threshold) {
		DoubleList<Split,Double> ica = new DoubleList<Split,Double>();
		List<DoubleList<Split,Integer>> greedyTreeConflicts = findConflictingSplitCounts(threshold, false);
		for (DoubleList<Split,Integer> splitList : greedyTreeConflicts) {
			ica.add(splitList.getA(0), internodeCertainty(splitList));
		}
		return ica;
	}
	/*
	 * For each greedy-consensus split, returns a list whose first entry is that split
	 * (with its count) followed by the incompatible splits (with counts) that compete with it.
	 * If thresholdIsLength is true, threshold = max number of splits to consider. (Use 2 to
	 * get the classic Internode Certainty: main split plus strongest competitor.)
	 * If thresholdIsLength is false, threshold = min split weight for split to make the list.
	 */
	public List<DoubleList<Split,Integer>> findConflictingSplitCounts(int threshold, boolean thresholdIsLength) {
		findGreedySplits();
		List<DoubleList<Split,Integer>> results = new Vector<DoubleList<Split,Integer>>(greedySplits.size());
		// greedySplitIndex caches where each greedy split sits in the frequency-sorted list.
		for (int i : greedySplitIndex) {
			String splitStr = sortedSplits.elementAt(i);
			Split split = splits.get(splitStr);
			// 'break' (not 'continue') is valid because sortedSplits is in descending frequency order.
			if (!thresholdIsLength && counts.get(splitStr)<threshold) break; // ignore splits with frequency below threshold
			DoubleList<Split,Integer> splitList = new DoubleList<Split,Integer>();
			splitList.add(split, counts.get(splitStr));
			// Only check splits after this one in sorted list: ones before this one are guaranteed to be
			// compatible, else this split would not be in the greedy list.
			for (int j=i+1; j<sortedSplits.size(); j++) {
				String otherSplitString = sortedSplits.elementAt(j);
				if (!thresholdIsLength && counts.get(otherSplitString)<threshold) break; // ignore conflicting splits with frequency below threshold
				Split otherSplit = splits.get(otherSplitString);
				if (!split.compatible(otherSplit)) {
					splitList.add(otherSplit, counts.get(otherSplitString));
				} // if !compatible
				if (thresholdIsLength && splitList.size()==threshold) break; // have enough secondary splits now
			} // for otherSplit (j)
			results.add(splitList);
		} // for i over sortedSplits
		return results;
	}
	/*
	 * Salichos Stamatakis and Rokas, MBE v31 p1261 (2014)
	 * If only two splits are listed, returns IC, the Internode Certainty.
	 * If more splits are listed, returns ICA, (IC All).
	 * Equals 1 + sum(p_i * log_n(p_i)), i.e. 1 minus the normalized entropy of the
	 * competing-split frequencies; 1 when the split is unopposed.
	 */
	private double internodeCertainty(DoubleList<Split,Integer> splits) {
		int sum = 0;
		int n = splits.size(); // number of splits under consideration
		for (int i=0; i<n; i++) sum += splits.getB(i);
		double ic = 1;
		if (n>1) {
			ic = Math.log(n);
			for (int i=0; i<n; i++) {
				double p = ((double)splits.getB(i))/sum;
				ic += p*Math.log(p);
			}
			ic /= Math.log(n); // convert from natural log to log base n
		}
		return ic;
	}
	/**
	 *
	 * @return f[] where f[i]==c indicates there were c splits which were present in exactly i trees.
	 */
	public int[] countByFrequency() {
		int max=0;
		// When every split came from a tree no count can exceed nTrees;
		// otherwise scan for the largest count to size the histogram.
		if (splitsAddedOnlyViaTrees) max = nTrees;
		else for (int n : counts.values()) max = Math.max(max, n);
		int[] freq = new int[max];
		for (int i : counts.values()) freq[i-1]++; // counts are >= 1 by construction
		return freq;
	}
	/** @return the number of distinct splits that separate exactly two taxa (cherries). */
	public int numUniqueCherries() {
		int count = 0;
		for (Split split : splits.values()) {
			if (split.sizeOfSmaller()==2) count++;
		}
		return count;
	}
	/**
	 * Crude display of the object contents:
	 * Splits as hex numbers, then count.
	 */
	public void hexDump(PrintWriter out) {
		for (Entry<String,Integer> entry : counts.entrySet()) {
			out.printf("%s: %d\n", entry.getKey(), entry.getValue());
		}
	}
	/**
	 * Replacement for hexDump: prints each split's own string form rather than its hex key.
	 */
	public void tempDump(PrintWriter out) {
		for (String key : counts.keySet()) {
			out.printf("%s: %d\n", splits.get(key).toString(), counts.get(key));
		}
	}
	/** @return the occurrence count for the split with this hex key, 0 if never seen. */
	public int getCount(String key) {
		return (counts.containsKey(key)) ? counts.get(key) : 0;
	}
	public int getCount(Split split) {
		return getCount(split.toHexString());
	}
	public Split getSplit(String key) {
		return splits.get(key);
	}
	public Iterator<String> getHexIterator() {
		return splits.keySet().iterator();
	}
	public String[] getHexArray() {
		String[] array = new String[splits.size()];
		splits.keySet().toArray(array);
		return array;
	}
	public int numUniqueSplits() {
		return splits.size();
	}
	/*
	 * Methods to implement IdGroup, which just pass through to 'idg' member
	 */
	@Override
	public int getIdCount() { return idg.getIdCount(); }
	@Override
	public Identifier getIdentifier(int i) {return idg.getIdentifier(i); }
	@Override
	public void setIdentifier(int i, Identifier id) { idg.setIdentifier(i, id); }
	@Override
	public int whichIdNumber(String name) {return idg.whichIdNumber(name); }
}
| |
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.execution;
import com.intellij.codeInsight.daemon.impl.analysis.JavaModuleGraphUtil;
import com.intellij.debugger.impl.GenericDebuggerRunnerSettings;
import com.intellij.diagnostic.logging.OutputFileUtil;
import com.intellij.execution.configurations.*;
import com.intellij.execution.filters.ArgumentFileFilter;
import com.intellij.execution.impl.ConsoleBuffer;
import com.intellij.execution.process.*;
import com.intellij.execution.runners.ExecutionEnvironment;
import com.intellij.execution.runners.ProgramRunner;
import com.intellij.execution.target.*;
import com.intellij.execution.target.local.LocalTargetEnvironment;
import com.intellij.execution.testDiscovery.JavaAutoRunManager;
import com.intellij.execution.testframework.*;
import com.intellij.execution.testframework.actions.AbstractRerunFailedTestsAction;
import com.intellij.execution.testframework.autotest.AbstractAutoTestManager;
import com.intellij.execution.testframework.autotest.ToggleAutoTestAction;
import com.intellij.execution.testframework.sm.SMTestRunnerConnectionUtil;
import com.intellij.execution.testframework.sm.runner.SMRunnerConsolePropertiesProvider;
import com.intellij.execution.testframework.sm.runner.SMTRunnerConsoleProperties;
import com.intellij.execution.testframework.sm.runner.ui.SMTRunnerConsoleView;
import com.intellij.execution.testframework.sm.runner.ui.SMTestRunnerResultsForm;
import com.intellij.execution.testframework.ui.BaseTestsOutputConsoleView;
import com.intellij.execution.util.JavaParametersUtil;
import com.intellij.execution.util.ProgramParametersConfigurator;
import com.intellij.execution.util.ProgramParametersUtil;
import com.intellij.execution.wsl.target.WslTargetEnvironmentConfiguration;
import com.intellij.execution.wsl.target.WslTargetEnvironmentRequest;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.compiler.JavaCompilerBundle;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.extensions.ExtensionPointName;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleUtilCore;
import com.intellij.openapi.progress.EmptyProgressIndicator;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.projectRoots.JavaSdkType;
import com.intellij.openapi.projectRoots.JavaSdkVersion;
import com.intellij.openapi.projectRoots.JdkUtil;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.projectRoots.ex.JavaSdkUtil;
import com.intellij.openapi.roots.CompilerModuleExtension;
import com.intellij.openapi.roots.ModuleRootManager;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.JavaPsiFacade;
import com.intellij.psi.PsiDirectory;
import com.intellij.psi.PsiJavaModule;
import com.intellij.psi.PsiPackage;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.search.GlobalSearchScopesCore;
import com.intellij.util.PathUtil;
import com.intellij.util.net.NetUtils;
import com.intellij.util.ui.UIUtil;
import org.jdom.Element;
import org.jetbrains.annotations.ApiStatus;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.concurrency.AsyncPromise;
import org.jetbrains.jps.model.serialization.PathMacroUtil;
import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.net.InetAddress;
import java.net.ServerSocket;
import java.nio.charset.StandardCharsets;
import java.util.*;
public abstract class JavaTestFrameworkRunnableState<T extends
ModuleBasedConfiguration<JavaRunConfigurationModule, Element>
& CommonJavaRunConfigurationParameters
& ConfigurationWithCommandLineShortener
& SMRunnerConsolePropertiesProvider> extends JavaCommandLineState implements RemoteConnectionCreator {
private static final Logger LOG = Logger.getInstance(JavaTestFrameworkRunnableState.class);
// Extension point letting plugins patch the JVM parameters of JUnit-style runs.
private static final ExtensionPointName<JUnitPatcher> JUNIT_PATCHER_EP = new ExtensionPointName<>("com.intellij.junitPatcher");
// Name of the ParamsGroup holding module-system (JPMS) VM options.
private static final String JIGSAW_OPTIONS = "Jigsaw Options";
/** Returns the VM-parameter group holding JPMS (Jigsaw) options, or null if absent. */
public static ParamsGroup getJigsawOptions(JavaParameters parameters) {
return parameters.getVMParametersList().getParamsGroup(JIGSAW_OPTIONS);
}
// Server socket bound lazily against the (possibly remote) target environment.
private @Nullable TargetBoundServerSocket myTargetBoundServerSocket;
// Temp file used to pass test lists to the forked JVM; presumably deleted on process exit — see deleteTempFiles() usage. TODO confirm
protected File myTempFile;
protected File myWorkingDirsFile = null;
// Optional override for how debugger connections are created (see createRemoteConnection).
private RemoteConnectionCreator remoteConnectionCreator;
// Console filters revealing the contents of @argfiles in process output.
private final List<ArgumentFileFilter> myArgumentFileFilters = new ArrayList<>();
// Set for the duration of prepareTargetEnvironmentRequest(); see getTargetProgressIndicator().
@Nullable private volatile TargetProgressIndicator myTargetProgressIndicator = null;
@Nullable
protected final ServerSocket getServerSocket() {
  // Guard-clause form: no target-bound socket holder means no server socket yet.
  TargetBoundServerSocket boundSocket = myTargetBoundServerSocket;
  if (boundSocket == null) {
    return null;
  }
  return boundSocket.getServerSocket();
}
/** Installs a custom creator for debugger connections; null restores the default behaviour. */
public void setRemoteConnectionCreator(RemoteConnectionCreator remoteConnectionCreator) {
this.remoteConnectionCreator = remoteConnectionCreator;
}
@Nullable
@Override
public RemoteConnection createRemoteConnection(ExecutionEnvironment environment) {
  // Prefer an explicitly installed creator; otherwise fall back to the superclass default.
  RemoteConnectionCreator creator = remoteConnectionCreator;
  if (creator != null) {
    return creator.createRemoteConnection(environment);
  }
  return super.createRemoteConnection(environment);
}
@Override
public boolean isPollConnection() {
  // Mirror createRemoteConnection(): delegate to the installed creator when present.
  if (remoteConnectionCreator == null) {
    return super.isPollConnection();
  }
  return remoteConnectionCreator.isPollConnection();
}
/** @param environment the execution environment this runnable state is created for */
public JavaTestFrameworkRunnableState(ExecutionEnvironment environment) {
super(environment);
}
/** Human-readable framework name used for console titles (e.g. "JUnit"). */
@NotNull
protected abstract String getFrameworkName();
/** Stable framework identifier; presumably used in settings/statistics keys — TODO confirm against subclasses. */
@NotNull
protected abstract String getFrameworkId();
/** Appends the argument passing the temp-file path to the forked test JVM. */
protected abstract void passTempFile(ParametersList parametersList, String tempFilePath);
/** The run configuration this state executes. */
@NotNull
protected abstract T getConfiguration();
/** Test search scope of the configuration, or null when not applicable. */
@Nullable
protected abstract TestSearchScope getScope();
/** Fork mode of the configuration ("none", per-class, per-method, ...). */
@NotNull
protected abstract String getForkMode();
private OSProcessHandler createHandler(Executor executor, SMTestRunnerResultsForm viewer) throws ExecutionException {
appendForkInfo(executor);
appendRepeatMode();
TargetEnvironment remoteEnvironment = getEnvironment().getPreparedTargetEnvironment(this, TargetProgressIndicator.EMPTY);
TargetedCommandLineBuilder targetedCommandLineBuilder = getTargetedCommandLine();
TargetedCommandLine targetedCommandLine = targetedCommandLineBuilder.build();
resolveServerSocketPort(remoteEnvironment);
Process process = remoteEnvironment.createProcess(targetedCommandLine, new EmptyProgressIndicator());
SearchForTestsTask searchForTestsTask = createSearchingForTestsTask(remoteEnvironment);
if (searchForTestsTask != null) {
searchForTestsTask.arrangeForIndexAccess();
searchForTestsTask.setIncompleteIndexUsageCallback(() -> viewer.setIncompleteIndexUsed());
}
OSProcessHandler processHandler = new KillableColoredProcessHandler.Silent(process,
targetedCommandLine
.getCommandPresentation(remoteEnvironment),
targetedCommandLine.getCharset(),
targetedCommandLineBuilder.getFilesToDeleteOnTermination());
ProcessTerminatedListener.attach(processHandler);
if (searchForTestsTask != null) {
searchForTestsTask.attachTaskToProcess(processHandler);
}
return processHandler;
}
/** Creates a WSL target request when the configured JDK lives inside WSL; null otherwise. */
@Override
public TargetEnvironmentRequest createCustomTargetEnvironmentRequest() {
// Don't call getJavaParameters() because it will perform too much initialization
WslTargetEnvironmentConfiguration config = checkCreateWslConfiguration(getJdk());
return config == null ? null : new WslTargetEnvironmentRequest(config);
}
/** Binds the target-bound server socket against the prepared environment, if one was requested. */
public void resolveServerSocketPort(@NotNull TargetEnvironment remoteEnvironment) throws ExecutionException {
if (myTargetBoundServerSocket != null) {
myTargetBoundServerSocket.bind(remoteEnvironment);
}
}
/**
 * @deprecated Use {@link #createSearchingForTestsTask(TargetEnvironment)} instead
 */
@Deprecated
@ApiStatus.ScheduledForRemoval(inVersion = "2021.3")
public @Nullable SearchForTestsTask createSearchingForTestsTask() throws ExecutionException {
// Kept only as the default for the replacement overload below; subclasses may still override it.
return null;
}
/** Replacement overload; delegates to the deprecated no-arg variant for backward compatibility. */
public @Nullable SearchForTestsTask createSearchingForTestsTask(@NotNull TargetEnvironment targetEnvironment) throws ExecutionException {
return createSearchingForTestsTask();
}
/** Whether classpath/VM setup should be driven by the given module (default: any non-null module). */
protected boolean configureByModule(Module module) {
return module != null;
}
/** Whether the test tree is keyed by node ids rather than locations; subclasses override. */
protected boolean isIdBasedTestTree() {
return false;
}
/**
 * Stores the progress indicator (exposed via getTargetProgressIndicator() during
 * preparation), propagates the configuration's project-path-on-target, then
 * delegates to the superclass.
 */
@Override
public void prepareTargetEnvironmentRequest(@NotNull TargetEnvironmentRequest request,
                                            @NotNull TargetProgressIndicator targetProgressIndicator) throws ExecutionException {
  myTargetProgressIndicator = targetProgressIndicator;
  // FIX: local was named 'myConfiguration' — the 'my' prefix is this codebase's
  // field convention, so the local read like a field access.
  T configuration = getConfiguration();
  if (configuration.getProjectPathOnTarget() != null) {
    request.setProjectPathOnTarget(configuration.getProjectPathOnTarget());
  }
  super.prepareTargetEnvironmentRequest(request, targetProgressIndicator);
}
/**
 * Returns the current {@link TargetProgressIndicator} if the call happens
 * within the execution of {@code prepareTargetEnvironmentRequest(...)}.
 *
 * @return the current {@link TargetProgressIndicator} if it is present and
 * {@code null} otherwise
 */
@ApiStatus.Internal
protected final @Nullable TargetProgressIndicator getTargetProgressIndicator() {
// NOTE(review): the field is never reset after preparation, so this can also
// return the last indicator after prepareTargetEnvironmentRequest() completed — confirm intended.
return myTargetProgressIndicator;
}
/**
 * Extends the base command line with input redirection (uploading the input file
 * to the target) and registers console filters for any @argfiles recorded by JdkUtil.
 */
@NotNull
@Override
protected TargetedCommandLineBuilder createTargetedCommandLine(@NotNull TargetEnvironmentRequest request)
throws ExecutionException {
TargetedCommandLineBuilder commandLineBuilder = super.createTargetedCommandLine(request);
File inputFile = InputRedirectAware.getInputFile(getConfiguration());
if (inputFile != null) {
commandLineBuilder.setInputFile(request.getDefaultVolume().createUpload(inputFile.getAbsolutePath()));
}
// Argfile path -> content pairs; filters let the console expand them on click.
Map<String, String> content = commandLineBuilder.getUserData(JdkUtil.COMMAND_LINE_CONTENT);
if (content != null) {
content.forEach((key, value) -> myArgumentFileFilters.add(new ArgumentFileFilter(key, value)));
}
return commandLineBuilder;
}
/**
 * Runs the test process and wires up the SM test-runner console: creates the
 * console and results viewer, starts the process, attaches output filters and
 * listeners, and installs "rerun failed" / auto-test restart actions.
 * The listener/attachment order below is deliberate; do not reorder casually.
 */
@NotNull
@Override
public ExecutionResult execute(@NotNull Executor executor, @NotNull ProgramRunner<?> runner) throws ExecutionException {
final RunnerSettings runnerSettings = getRunnerSettings();
final SMTRunnerConsoleProperties testConsoleProperties = getConfiguration().createTestConsoleProperties(executor);
testConsoleProperties.setIfUndefined(TestConsoleProperties.HIDE_PASSED_TESTS, false);
// Console creation touches Swing, hence the EDT hop.
final BaseTestsOutputConsoleView consoleView =
UIUtil.invokeAndWaitIfNeeded(() -> SMTestRunnerConnectionUtil.createConsole(getFrameworkName(), testConsoleProperties));
final SMTestRunnerResultsForm viewer = ((SMTRunnerConsoleView)consoleView).getResultsViewer();
Disposer.register(getConfiguration().getProject(), consoleView);
OSProcessHandler handler = createHandler(executor, viewer);
// Filters must be added before attaching the process so early output is filtered too.
for (ArgumentFileFilter filter : myArgumentFileFilters) {
consoleView.addMessageFilter(filter);
}
consoleView.attachToProcess(handler);
final AbstractTestProxy root = viewer.getRoot();
if (root instanceof TestProxyRoot) {
((TestProxyRoot)root).setHandler(handler);
}
handler.addProcessListener(new ProcessAdapter() {
@Override
public void startNotified(@NotNull ProcessEvent event) {
if (getConfiguration().isSaveOutputToFile()) {
final File file = OutputFileUtil.getOutputFile(getConfiguration());
root.setOutputFilePath(file != null ? file.getAbsolutePath() : null);
}
}
@Override
public void processTerminated(@NotNull ProcessEvent event) {
// Flush/cleanup must run on the EDT; self-removal prevents repeated callbacks.
Runnable runnable = () -> {
root.flushOutputFile();
deleteTempFiles();
clear();
};
UIUtil.invokeLaterIfNeeded(runnable);
handler.removeProcessListener(this);
}
});
AbstractRerunFailedTestsAction rerunFailedTestsAction = testConsoleProperties.createRerunFailedTestsAction(consoleView);
LOG.assertTrue(rerunFailedTestsAction != null);
rerunFailedTestsAction.setModelProvider(() -> viewer);
final DefaultExecutionResult result = new DefaultExecutionResult(consoleView, handler);
result.setRestartActions(rerunFailedTestsAction, new ToggleAutoTestAction() {
@Override
public boolean isDelayApplicable() {
return false;
}
@Override
public AbstractAutoTestManager getAutoTestManager(Project project) {
return JavaAutoRunManager.getInstance(project);
}
});
JavaRunConfigurationExtensionManager.getInstance().attachExtensionsToProcess(getConfiguration(), handler, runnerSettings);
return result;
}
/** Adds the framework's runtime (rt) jars to the classpath; implemented per framework. */
protected abstract void configureRTClasspath(JavaParameters javaParameters, Module module) throws CantRunException;
protected Sdk getJdk() {
  // The module SDK wins; only fall back to the project SDK when no module is configured.
  final Module module = getConfiguration().getConfigurationModule().getModule();
  if (module != null) {
    return ModuleRootManager.getInstance(module).getSdk();
  }
  Project project = getConfiguration().getProject();
  return ProjectRootManager.getInstance(project).getProjectSdk();
}
/**
 * Builds the JVM parameters for the test run. Program parameters are temporarily
 * cleared so the standard configuration step doesn't append them verbatim; they
 * are re-added afterwards through getNamedParams(). Also prepends idea_rt.jar,
 * applies JUnitPatcher extensions, and configures the cyclic output buffer.
 */
@Override
protected JavaParameters createJavaParameters() throws ExecutionException {
final JavaParameters javaParameters = new JavaParameters();
Project project = getConfiguration().getProject();
final Module module = getConfiguration().getConfigurationModule().getModule();
javaParameters.setJdk(getJdk());
// Stash and null the program parameters so configureConfiguration() skips them.
final String parameters = getConfiguration().getProgramParameters();
getConfiguration().setProgramParameters(null);
try {
JavaParametersUtil.configureConfiguration(javaParameters, getConfiguration());
}
finally {
// Always restore, even if configuration failed.
getConfiguration().setProgramParameters(parameters);
}
configureClasspath(javaParameters);
javaParameters.getClassPath().addFirst(JavaSdkUtil.getIdeaRtJarPath());
javaParameters.setShortenCommandLine(getConfiguration().getShortenCommandLine(), project);
for (JUnitPatcher patcher : JUNIT_PATCHER_EP.getExtensionList()) {
patcher.patchJavaParameters(project, module, javaParameters);
}
JavaRunConfigurationExtensionManager.getInstance()
.updateJavaParameters(getConfiguration(), javaParameters, getRunnerSettings(), getEnvironment().getExecutor());
if (!StringUtil.isEmptyOrSpaces(parameters)) {
// Re-add the stashed parameters in the "@name..." form the runner expects.
javaParameters.getProgramParametersList().addAll(getNamedParams(parameters));
}
if (ConsoleBuffer.useCycleBuffer()) {
javaParameters.getVMParametersList().addProperty("idea.test.cyclic.buffer.size", String.valueOf(ConsoleBuffer.getCycleBufferSize()));
}
return javaParameters;
}
/**
 * Wraps the raw program parameters into the single {@code @name<params>}
 * argument format understood by the test runner.
 */
protected List<String> getNamedParams(String parameters) {
  final String namedParam = "@name" + parameters;
  return Collections.singletonList(namedParam);
}
// lazily created loopback socket used by forked test JVMs to report back
private ServerSocket myForkSocket = null;

/**
 * Lazily creates the server socket (loopback, OS-chosen port) used to talk to
 * forked test processes. Returns null when fork mode is off and tests are not
 * forked per module, or when there are no runner settings.
 */
@Nullable
public ServerSocket getForkSocket() {
  if (myForkSocket == null && (!Comparing.strEqual(getForkMode(), "none") || forkPerModule()) && getRunnerSettings() != null) {
    try {
      myForkSocket = new ServerSocket(0, 0, InetAddress.getByName("127.0.0.1"));
    }
    catch (IOException e) {
      LOG.error(e);
    }
  }
  return myForkSocket;
}
/** Only the debugger runner supports forked mode; any other runner is disabled there. */
private boolean isExecutorDisabledInForkedMode() {
  final RunnerSettings settings = getRunnerSettings();
  if (settings == null) {
    return false;
  }
  return !(settings instanceof GenericDebuggerRunnerSettings);
}
/**
 * When fork mode (or per-module working directories) is in effect, writes a
 * temp "command.line" file describing how forked child JVMs must be launched
 * (classpath-shortening mode on the first line, then the VM executable path,
 * then one VM option per line) and hands it to the concrete framework via
 * {@link #passForkMode}.
 *
 * @throws ExecutionException when the current executor cannot run in forked
 *                            mode, or no JDK is configured
 */
public void appendForkInfo(Executor executor) throws ExecutionException {
  final String forkMode = getForkMode();
  if (Comparing.strEqual(forkMode, "none")) {
    if (forkPerModule()) {
      if (isExecutorDisabledInForkedMode()) {
        final String actionName = executor.getActionName();
        throw new CantRunException(JavaCompilerBundle.message("action.disabled.when.per.module.working.directory.configured",
                                                              actionName));
      }
    }
    else {
      // no forking and a single working directory - nothing to prepare
      return;
    }
  }
  else if (isExecutorDisabledInForkedMode()) {
    final String actionName = executor.getActionName();
    throw new CantRunException(JavaCompilerBundle.message("action.disabled.in.fork.mode", actionName,
                                                          StringUtil.toLowerCase(actionName)));
  }
  final JavaParameters javaParameters = getJavaParameters();
  final Sdk jdk = javaParameters.getJdk();
  if (jdk == null) {
    throw new ExecutionException(ExecutionBundle.message("run.configuration.error.no.jdk.specified"));
  }
  try {
    final File tempFile = FileUtil.createTempFile("command.line", "", true);
    try (PrintWriter writer = new PrintWriter(tempFile, StandardCharsets.UTF_8)) {
      ShortenCommandLine shortenCommandLine = getConfiguration().getShortenCommandLine();
      boolean useDynamicClasspathForForkMode = shortenCommandLine == null
                                               ? JdkUtil.useDynamicClasspath(getConfiguration().getProject())
                                               : shortenCommandLine != ShortenCommandLine.NONE;
      // first line: how the child-fork starter must shorten the command line
      if (shortenCommandLine == ShortenCommandLine.ARGS_FILE) {
        //see com.intellij.rt.execution.testFrameworks.ForkedByModuleSplitter.startChildFork
        writer.println(shortenCommandLine);
      }
      else if (useDynamicClasspathForForkMode && forkPerModule()) {
        writer.println("use classpath jar");
      }
      else {
        // empty line means: no shortening
        writer.println("");
      }
      writer.println(((JavaSdkType)jdk.getSdkType()).getVMExecutablePath(jdk));
      for (String vmParameter : javaParameters.getVMParametersList().getList()) {
        writer.println(vmParameter);
      }
    }
    passForkMode(getForkMode(), tempFile, javaParameters);
  }
  catch (IOException e) {
    LOG.error(e);
  }
}
/** Passes the fork-mode description file produced by {@link #appendForkInfo} to the framework's command line. */
protected abstract void passForkMode(String forkMode, File tempFile, JavaParameters parameters) throws ExecutionException;
/**
 * Collects test-listener extensions registered under {@code epName}: appends
 * each enabled listener's class name to {@code buf} (separated by
 * {@code delimiter}) and adds its jar to the classpath. Extensions may veto a
 * listener for this configuration/runner combination.
 */
protected void collectListeners(JavaParameters javaParameters, StringBuilder buf, String epName, String delimiter) {
  final T configuration = getConfiguration();
  for (final Object listener : Extensions.getRootArea().getExtensionPoint(epName).getExtensionList()) {
    boolean disabled = false;
    for (RunConfigurationExtension ext : RunConfigurationExtension.EP_NAME.getExtensionList()) {
      if (ext.isListenerDisabled(configuration, listener, getRunnerSettings())) {
        disabled = true;
        break;
      }
    }
    if (disabled) {
      continue;
    }
    if (buf.length() > 0) {
      buf.append(delimiter);
    }
    final Class<?> listenerClass = listener.getClass();
    buf.append(listenerClass.getName());
    javaParameters.getClassPath().add(PathUtil.getJarPathForClass(listenerClass));
  }
}
/**
 * Populates the classpath (JDK + production + test classes) either from the
 * configuration's module or from the whole project, then lets the concrete
 * framework append its runtime classes.
 */
protected void configureClasspath(final JavaParameters javaParameters) throws CantRunException {
  RunConfigurationModule configurationModule = getConfiguration().getConfigurationModule();
  // alternative JRE only applies to local runs (no target-environment request)
  final String jreHome = getTargetEnvironmentRequest() == null && getConfiguration().isAlternativeJrePathEnabled()
                         ? getConfiguration().getAlternativeJrePath()
                         : null;
  final int pathType = JavaParameters.JDK_AND_CLASSES_AND_TESTS;
  Module module = configurationModule.getModule();
  if (configureByModule(module)) {
    JavaParametersUtil.configureModule(configurationModule, javaParameters, pathType, jreHome);
    LOG.assertTrue(module != null);
    // Jigsaw module path is only meaningful on JDK 9+
    if (JavaSdkUtil.isJdkAtLeast(javaParameters.getJdk(), JavaSdkVersion.JDK_1_9)) {
      configureModulePath(javaParameters, module);
    }
  }
  else {
    JavaParametersUtil.configureProject(getConfiguration().getProject(), javaParameters, pathType, jreHome);
  }
  // NOTE(review): module may be null on the project-wide branch - confirm
  // configureRTClasspath implementations tolerate a null module
  configureRTClasspath(javaParameters, module);
}
/**
 * Finds the {@code module-info} descriptor (PsiJavaModule) of the given IDE
 * module, using dumb-mode alternative resolve so it also works during indexing.
 *
 * @param inTests whether to search test sources instead of production sources
 */
protected static PsiJavaModule findJavaModule(Module module, boolean inTests) {
  return DumbService.getInstance(module.getProject())
    .computeWithAlternativeResolveEnabled(() -> JavaModuleGraphUtil.findDescriptorByModule(module, inTests));
}
/**
 * Configures the Jigsaw module path for a modular project. When test sources
 * have their own module-info, that module is added explicitly (it is
 * unreachable from {@code idea.rt}) and its dependencies go on the module
 * path; otherwise dependencies are split between module path and classpath
 * based on the production module-info.
 */
private void configureModulePath(JavaParameters javaParameters, @NotNull Module module) {
  if (!useModulePath()) {
    return;
  }
  PsiJavaModule testModule = findJavaModule(module, true);
  if (testModule == null) {
    // no test module descriptor: fall back to the production descriptor, if any
    PsiJavaModule prodModule = findJavaModule(module, false);
    if (prodModule != null) {
      splitDepsBetweenModuleAndClasspath(javaParameters, module, prodModule);
    }
    return;
  }
  // add the test module explicitly - it is unreachable from `idea.rt`
  ParametersList vmOptions = javaParameters
    .getVMParametersList()
    .addParamsGroup(JIGSAW_OPTIONS)
    .getParametersList();
  vmOptions.add("--add-modules");
  vmOptions.add(testModule.getName());
  // set up the module path from the test descriptor's dependencies
  JavaParametersUtil.putDependenciesOnModulePath(javaParameters, testModule, true);
}
/**
 * Puts dependencies reachable from the module-info located in production
 * sources on the module path, and leaves all other dependencies on the class
 * path as is. Also patches the test output into the production module and
 * opens it up so the test runner (running unnamed) can reach the tests.
 */
private void splitDepsBetweenModuleAndClasspath(JavaParameters javaParameters, Module module, PsiJavaModule prodModule) {
  CompilerModuleExtension compilerExt = CompilerModuleExtension.getInstance(module);
  if (compilerExt == null) return;
  JavaParametersUtil.putDependenciesOnModulePath(javaParameters, prodModule, true);
  ParametersList vmParametersList = javaParameters.getVMParametersList()
    .addParamsGroup(JIGSAW_OPTIONS)
    .getParametersList();
  String prodModuleName = prodModule.getName();
  // ensure test output is merged into the production module
  VirtualFile testOutput = compilerExt.getCompilerOutputPathForTests();
  if (testOutput != null) {
    vmParametersList.add("--patch-module");
    vmParametersList.add(new CompositeParameterTargetedValue().addLocalPart(prodModuleName + "=").addPathPart(testOutput.getPath()));
  }
  // ensure test dependencies missing from the production module descriptor are available in tests
  // todo enumerate all test dependencies explicitly
  vmParametersList.add("--add-reads");
  vmParametersList.add(prodModuleName + "=ALL-UNNAMED");
  // open packages containing tests to the (unnamed) test runner
  List<String> opensOptions = new ArrayList<>();
  collectPackagesToOpen(opensOptions);
  for (String option : opensOptions) {
    if (option.isEmpty()) continue;
    vmParametersList.add("--add-opens");
    vmParametersList.add(prodModuleName + "/" + option + "=ALL-UNNAMED");
  }
  // ensure the production module is explicitly added: the `idea-rt` test starter doesn't depend on it
  vmParametersList.add("--add-modules");
  vmParametersList.add(prodModuleName);
}
/** Hook: collect package names to pass as {@code --add-opens} options; the default contributes none. */
protected void collectPackagesToOpen(List<String> options) { }
/**
 * Recursively collects the qualified names of {@code aPackage} and all of its
 * sub-packages that contain at least one class within the given scope.
 * Called on EDT.
 */
protected static void collectSubPackages(List<String> options, PsiPackage aPackage, GlobalSearchScope globalSearchScope) {
  if (aPackage.getClasses(globalSearchScope).length != 0) {
    options.add(aPackage.getQualifiedName());
  }
  for (PsiPackage child : aPackage.getSubPackages(globalSearchScope)) {
    collectSubPackages(options, child, globalSearchScope);
  }
}
/**
 * Creates the (possibly target-forwarded) server socket the test process
 * connects back to, and passes its port via the {@code -socket} program
 * argument. The actual host:port on the target side is resolved
 * asynchronously once the target environment is bound.
 */
protected void createServerSocket(JavaParameters javaParameters) {
  try {
    myTargetBoundServerSocket = TargetBoundServerSocket.fromRequest(getTargetEnvironmentRequest());
    int localPort = myTargetBoundServerSocket.getLocalPort();
    AsyncPromise<String> hostPortPromise = myTargetBoundServerSocket.getHostPortPromise();
    javaParameters.getProgramParametersList()
      .add(new CompositeParameterTargetedValue("-socket").addTargetPart(String.valueOf(localPort), hostPortPromise));
  }
  catch (IOException e) {
    LOG.error(e);
  }
}
/**
 * True when the given package (within the configuration's test search scope)
 * has directories in more than one module - i.e. running it requires
 * per-module handling.
 */
protected boolean spansMultipleModules(final String qualifiedName) {
  if (qualifiedName == null) {
    return false;
  }
  final Project project = getConfiguration().getProject();
  final PsiPackage aPackage = JavaPsiFacade.getInstance(project).findPackage(qualifiedName);
  if (aPackage == null) {
    return false;
  }
  final TestSearchScope scope = getScope();
  if (scope == null) {
    return false;
  }
  final SourceScope sourceScope = scope.getSourceScope(getConfiguration());
  if (sourceScope == null) {
    return false;
  }
  // restrict to test sources inside the configuration's scope
  final GlobalSearchScope configurationSearchScope =
    GlobalSearchScopesCore.projectTestScope(project).intersectWith(sourceScope.getGlobalSearchScope());
  final long distinctModules = Arrays.stream(aPackage.getDirectories(configurationSearchScope))
    .map(dir -> ModuleUtilCore.findModuleForFile(dir.getVirtualFile(), project))
    .filter(Objects::nonNull)
    .distinct()
    .count();
  return distinctModules > 1;
}
/**
 * Configuration based on a package spanning multiple modules: true when the
 * scope is not single-module, the working directory is a module-relative
 * macro, and the package actually spans more than one module.
 */
protected boolean forkPerModule() {
  return getScope() != TestSearchScope.SINGLE_MODULE &&
         toChangeWorkingDirectory(getConfiguration().getWorkingDirectory()) &&
         ReadAction.compute(() -> spansMultipleModules(getConfiguration().getPackage()));
}
/** True when the working directory is one of the module-relative macros (so it must change per module). */
private static boolean toChangeWorkingDirectory(final String workingDirectory) {
  //noinspection deprecation
  final boolean deprecatedModuleDir = PathMacroUtil.DEPRECATED_MODULE_DIR.equals(workingDirectory);
  return deprecatedModuleDir
         || PathMacroUtil.MODULE_WORKING_DIR.equals(workingDirectory)
         || ProgramParametersConfigurator.MODULE_WORKING_DIR.equals(workingDirectory);
}
/**
 * Creates the two temp files used to communicate with the test runner: the
 * per-module working-dirs file (passed as {@code @w@<path>}) and the
 * framework's own temp file (passed via {@link #passTempFile}).
 */
protected void createTempFiles(JavaParameters javaParameters) {
  try {
    myWorkingDirsFile = FileUtil.createTempFile("idea_working_dirs_" + getFrameworkId(), ".tmp", true);
    javaParameters.getProgramParametersList()
      .add(new CompositeParameterTargetedValue().addLocalPart("@w@").addPathPart(myWorkingDirsFile));
    myTempFile = FileUtil.createTempFile("idea_" + getFrameworkId(), ".tmp", true);
    passTempFile(javaParameters.getProgramParametersList(), myTempFile.getAbsolutePath());
  }
  catch (Exception e) {
    LOG.error(e);
  }
}
/**
 * Writes the working-dirs file consumed by the forked-per-module test starter.
 * Line-oriented format: package name, then for each module its working dir,
 * module name, classpath data (see {@link #writeClasspath}), the class count,
 * one class name per line, and finally the filters string. Order matters -
 * it must match the reader in the test runtime.
 */
protected void writeClassesPerModule(String packageName,
                                     JavaParameters javaParameters,
                                     Map<Module, List<String>> perModule,
                                     @NotNull String filters) throws IOException {
  if (perModule != null) {
    // WHOLE_PROJECT scope: recompute the classpath per module below instead of sharing one
    final String classpath = getScope() == TestSearchScope.WHOLE_PROJECT
                             ? null : javaParameters.getClassPath().getPathsString();
    T configuration = getConfiguration();
    String workingDirectory = configuration.getWorkingDirectory();
    //when only classpath should be changed, e.g. for starting tests in IDEA's project when some modules can never appear on the same classpath,
    //like plugin and corresponding IDE register the same components twice
    boolean toChangeWorkingDirectory = toChangeWorkingDirectory(workingDirectory);
    try (PrintWriter wWriter = new PrintWriter(myWorkingDirsFile, StandardCharsets.UTF_8)) {
      Project project = configuration.getProject();
      String jreHome = configuration.isAlternativeJrePathEnabled() ? configuration.getAlternativeJrePath() : null;
      wWriter.println(packageName);
      for (Module module : perModule.keySet()) {
        wWriter.println(toChangeWorkingDirectory ? ProgramParametersUtil.getWorkingDir(configuration, project, module)
                                                 : workingDirectory);
        wWriter.println(module.getName());
        if (classpath == null) {
          // per-module classpath: configure a fresh set of parameters for this module
          final JavaParameters parameters = new JavaParameters();
          try {
            JavaParametersUtil.configureModule(module, parameters, JavaParameters.JDK_AND_CLASSES_AND_TESTS, jreHome);
            if (JavaSdkUtil.isJdkAtLeast(parameters.getJdk(), JavaSdkVersion.JDK_1_9)) {
              configureModulePath(parameters, module);
            }
            configureRTClasspath(parameters, module);
            parameters.getClassPath().add(JavaSdkUtil.getIdeaRtJarPath());
            writeClasspath(wWriter, parameters);
          }
          catch (CantRunException e) {
            // fall back to the shared classpath when the module cannot be configured
            writeClasspath(wWriter, javaParameters);
          }
        }
        else {
          writeClasspath(wWriter, javaParameters);
        }
        final List<String> classNames = perModule.get(module);
        wWriter.println(classNames.size());
        for (String className : classNames) {
          wWriter.println(className);
        }
        wWriter.println(filters);
      }
    }
  }
}
/**
 * Writes one module's classpath section: the class path line, the module path
 * line, the Jigsaw option count, then one option per line (count is 0 when
 * there are no Jigsaw options).
 */
private static void writeClasspath(PrintWriter wWriter, JavaParameters parameters) {
  wWriter.println(parameters.getClassPath().getPathsString());
  wWriter.println(parameters.getModulePath().getPathsString());
  ParamsGroup jigsawGroup = getJigsawOptions(parameters);
  List<String> options = jigsawGroup == null
                         ? Collections.emptyList()
                         : jigsawGroup.getParametersList().getList();
  wWriter.println(options.size());
  for (String option : options) {
    wWriter.println(option);
  }
}
/** Deletes the temp files created by {@link #createTempFiles}, if any. */
protected void deleteTempFiles() {
  for (File file : new File[]{myTempFile, myWorkingDirsFile}) {
    if (file != null) {
      FileUtil.delete(file);
    }
  }
}
/** Hook for frameworks that support repeated test runs; the default does nothing. */
public void appendRepeatMode() throws ExecutionException { }
/** Whether to put dependencies on the Jigsaw module path (JDK 9+); frameworks may opt out. */
protected boolean useModulePath() {
  return true;
}
/**
 * A server socket whose local port may be forwarded into a (possibly remote)
 * target environment. The target-side "host:port" string is published through
 * {@link #getHostPortPromise()} once {@link #bind} is called with the resolved
 * environment.
 */
private final static class TargetBoundServerSocket {
  // port reserved on the local machine before the environment is prepared
  private final int myLocalPort;
  // null when running purely locally (no target-environment request)
  private final @Nullable TargetEnvironment.LocalPortBinding myLocalPortBinding;
  /**
   * Guards {@link #myServerSocket}.
   */
  private final @NotNull Object myLock = new Object();
  private @Nullable ServerSocket myServerSocket;
  // completed in bind(); NOTE(review): never completed (nor set to error) if
  // bind() throws - confirm downstream consumers cannot hang on this promise
  private final @NotNull AsyncPromise<String> myHostPortPromise = new AsyncPromise<>();

  private TargetBoundServerSocket(int localPort) {
    myLocalPortBinding = null;
    myLocalPort = localPort;
  }

  private TargetBoundServerSocket(@NotNull TargetEnvironment.LocalPortBinding localPortBinding) {
    myLocalPortBinding = localPortBinding;
    myLocalPort = localPortBinding.getLocal();
  }

  public int getLocalPort() {
    return myLocalPort;
  }

  public @NotNull AsyncPromise<String> getHostPortPromise() {
    return myHostPortPromise;
  }

  /**
   * Opens the server socket on the local side and publishes the address the
   * target process must connect to (plain local port for local targets, the
   * forwarded endpoint otherwise).
   */
  public void bind(@NotNull TargetEnvironment targetEnvironment) throws ExecutionException {
    String hostPort;
    try {
      String serverHost;
      boolean local = targetEnvironment instanceof LocalTargetEnvironment;
      if (local) {
        serverHost = "127.0.0.1";
        hostPort = String.valueOf(myLocalPort);
      }
      else {
        ResolvedPortBinding resolvedPortBinding = targetEnvironment.getLocalPortBindings().get(myLocalPortBinding);
        serverHost = resolvedPortBinding.getLocalEndpoint().getHost();
        HostPort targetHostPort = resolvedPortBinding.getTargetEndpoint();
        hostPort = targetHostPort.getHost() + ":" + targetHostPort.getPort();
      }
      createServerSocketImpl(serverHost);
    }
    catch (IOException e) {
      throw new ExecutionException(e);
    }
    myHostPortPromise.setResult(hostPort);
  }

  // creates the socket exactly once; throws if called twice
  private @NotNull ServerSocket createServerSocketImpl(@NotNull String host) throws IOException {
    synchronized (myLock) {
      if (myServerSocket != null) {
        throw new IllegalStateException("Server socket already created");
      }
      ServerSocket socket = new ServerSocket(myLocalPort, 0, InetAddress.getByName(host));
      myServerSocket = socket;
      return socket;
    }
  }

  /** Returns the socket created by {@link #bind}; throws if bind() has not run yet. */
  public @NotNull ServerSocket getServerSocket() {
    synchronized (myLock) {
      if (myServerSocket == null) {
        throw new IllegalStateException("Server socket must be resolved");
      }
      return myServerSocket;
    }
  }

  /**
   * Factory: reserves a free local port; when a target-environment request is
   * present, also registers a port binding so the port gets forwarded.
   */
  @NotNull
  public static TargetBoundServerSocket fromRequest(@Nullable TargetEnvironmentRequest targetEnvironmentRequest) throws IOException {
    int serverPort = NetUtils.findAvailableSocketPort();
    if (targetEnvironmentRequest != null) {
      TargetEnvironment.LocalPortBinding localPortBinding = new TargetEnvironment.LocalPortBinding(serverPort, null);
      targetEnvironmentRequest.getLocalPortBindings().add(localPortBinding);
      return new TargetBoundServerSocket(localPortBinding);
    }
    else {
      return new TargetBoundServerSocket(serverPort);
    }
  }
}
}
| |
/**
*
* @author grog (at) myrobotlab.org
*
* This file is part of MyRobotLab (http://myrobotlab.org).
*
* MyRobotLab is free software: you can redistribute it and/or modify
* it under the terms of the Apache License 2.0 as published by
* the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version (subject to the "Classpath" exception
* as provided in the LICENSE.txt file that accompanied this code).
*
* MyRobotLab is distributed in the hope that it will be useful or fun,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* Apache License 2.0 for more details.
*
* All libraries in thirdParty bundle are subject to their own license
* requirements - please refer to http://myrobotlab.org/libraries for
* details.
*
* Enjoy !
*
* Dependencies:
* sphinx4-1.0beta6
* google recognition - a network connection is required
*
* References:
* Swapping Grammars - http://cmusphinx.sourceforge.net/wiki/sphinx4:swappinggrammars
*
* http://cmusphinx.sourceforge.net/sphinx4/javadoc/edu/cmu/sphinx/jsgf/JSGFGrammar.html#loadJSGF(java.lang.String)
* TODO - loadJSGF - The JSGF grammar specified by grammarName will be loaded from the base url (tossing out any previously loaded grammars)
*
*
* */
package org.myrobotlab.service;
import java.io.File;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import org.apache.commons.lang.StringUtils;
import org.myrobotlab.framework.Message;
import org.myrobotlab.framework.Service;
import org.myrobotlab.io.FileIO;
import org.myrobotlab.logging.Level;
import org.myrobotlab.logging.LoggerFactory;
import org.myrobotlab.logging.Logging;
import org.myrobotlab.logging.LoggingFactory;
import org.myrobotlab.service.abstracts.AbstractSpeechRecognizer;
import org.myrobotlab.service.data.Locale;
import org.myrobotlab.service.interfaces.SpeechSynthesis;
import org.myrobotlab.service.interfaces.TextListener;
import org.slf4j.Logger;
import edu.cmu.sphinx.frontend.util.Microphone;
import edu.cmu.sphinx.recognizer.Recognizer;
import edu.cmu.sphinx.result.Result;
import edu.cmu.sphinx.util.props.ConfigurationManager;
/**
*
* Sphinx - Speech recognition based on CMU Sphinx. This service must be told
* what it's listening for. It does not do free-form speech recognition.
*
*/
@Deprecated /* we need another offline solution - one that doesn't suck */
public class Sphinx extends AbstractSpeechRecognizer {
/**
 * Worker thread that owns the Sphinx recognizer: loads the grammar
 * configuration, allocates the recognizer and microphone, then loops calling
 * recognize() and dispatching results through the command /
 * confirmation / negation / bypass state machine.
 */
class SpeechProcessor extends Thread {
  Sphinx myService = null;
  // loop flag; set to false by Sphinx.stopListening() to end the thread
  public boolean isRunning = false;

  public SpeechProcessor(Sphinx myService) {
    super(myService.getName() + "_ear");
    this.myService = myService;
  }

  @Override
  public void run() {
    try {
      isRunning = true;
      info(String.format("starting speech processor thread %s_ear", myService.getName()));
      // prefer a per-service grammar config saved in the cfg dir
      String newPath = FileIO.getCfgDir() + File.separator + myService.getName() + ".xml";
      File localGramFile = new File(newPath);
      info("loading grammar file");
      if (localGramFile.exists()) {
        info(String.format("grammar config %s", newPath));
        cm = new ConfigurationManager(newPath);
      } else {
        // resource in jar default
        info(String.format("grammar /resource/Sphinx/simple.xml"));
        cm = new ConfigurationManager(this.getClass().getResource(FileIO.gluePaths(getResourceDir(), "/Sphinx/simple.xml")));
      }
      info("starting recognizer");
      // start the word recognizer
      recognizer = (Recognizer) cm.lookup("recognizer");
      recognizer.allocate();
      info("starting microphone");
      microphone = (Microphone) cm.lookup("microphone");
      if (!microphone.startRecording()) {
        log.error("Cannot start microphone.");
        recognizer.deallocate();
      }
      // loop the recognition until the program exits.
      isListening = true;
      while (isRunning) {
        info("listening: %b", isListening);
        invoke("listeningEvent", true);
        // blocks until something is recognized (or the recognizer gives up)
        Result result = recognizer.recognize();
        if (!isListening) {
          // we could have stopped listening
          Thread.sleep(250);
          continue;
        }
        log.info("Recognized Loop: {} Listening: {}", result, isListening);
        // log.error(result.getBestPronunciationResult());
        if (result != null) {
          String resultText = result.getBestFinalResultNoFiller();
          if (StringUtils.isEmpty(resultText)) {
            // nothing heard?
            continue;
          }
          log.info("recognized: " + resultText + '\n');
          if (resultText.length() > 0 && isListening) {
            // lock phrases active: ignore everything except locks and confirmations
            if (lockPhrases.size() > 0 && !lockPhrases.contains(resultText) && !confirmations.containsKey(resultText)) {
              log.info("but locked on {}", resultText);
              continue;
            }
            // command system being used
            if (commands != null) {
              if (currentCommand != null && (confirmations == null || confirmations.containsKey(resultText))) {
                // i have a command and a confirmation
                // command sent
                send(currentCommand);
                // command finished
                currentCommand = null;
                invoke("publishText", "ok");
                continue;
              } else if (currentCommand != null && negations.containsKey(resultText)) {
                // negation has happened... recognized the
                // wrong command
                // reset command
                currentCommand = null;
                // apology
                invoke("publishText", "sorry");
                continue;
              } else if (commands.containsKey(resultText) && (confirmations != null || negations != null)) {
                if (bypass != null && bypass.containsKey(resultText)) {
                  // we have confirmation and/or negations
                  // - but we also have a bypass
                  // NOTE(review): currentCommand may be null here (this branch is
                  // entered via commands.containsKey(resultText)) - possibly
                  // commands.get(resultText) was intended; confirm
                  send(currentCommand);
                } else {
                  // setting new potential command - using
                  // either confirmations or negations
                  Message cmd = commands.get(resultText);
                  currentCommand = cmd;
                  invoke("publishRequestConfirmation", resultText);
                  // continue in the loop, we should stop listening, and we
                  // shouldn't publish the text because we just asked for
                  // confirmation.
                  continue;
                }
              } else if (commands.containsKey(resultText)) {
                // no confirmations or negations are being
                // used - just send command
                Message cmd = commands.get(resultText);
                send(cmd);
              } else {
                error(String.format("unknown use case for Sphinx commands - word is %s", resultText));
                // we don't know what this command was.. just continue.. we
                // shouldn't publish text or recognized.
                // we recognized it. but we don't publish text..
                invoke("recognized", resultText);
                continue;
              }
            }
            // publishRecognized(resultText);
            // Only publish the text if there was a known command?
            invoke("publishText", resultText);
            invoke("recognized", resultText);
          }
        } else {
          sleep(250);
          // invoke("unrecognizedSpeech");
          log.error("I can't hear what you said.\n");
        }
      }
    } catch (Exception e) {
      error(e);
    }
  }
}
private static final long serialVersionUID = 1L;

public final static Logger log = LoggerFactory.getLogger(Sphinx.class.getCanonicalName());

// Sphinx front-end components - created/owned by the SpeechProcessor thread
transient Microphone microphone = null;
transient ConfigurationManager cm = null;
transient Recognizer recognizer = null;
transient SpeechProcessor speechProcessor = null;

// when non-empty, only these phrases (plus confirmations) are acted upon
HashSet<String> lockPhrases = new HashSet<String>();

// optional phrase -> message maps driving the confirmation/negation/bypass flow; null when unused
HashMap<String, Message> confirmations = null;
HashMap<String, Message> negations = null;
HashMap<String, Message> bypass = null;

// command awaiting user confirmation; null when none is pending
Message currentCommand = null;
/**
 * Demo entry point: wires a Sphinx ear to a MarySpeech mouth, registers a few
 * voice commands against Log and Clock services, and starts listening.
 */
public static void main(String[] args) {
  LoggingFactory.init(Level.DEBUG);
  try {
    Sphinx ear = (Sphinx) Runtime.start("ear", "Sphinx");
    SpeechSynthesis speech = (MarySpeech) Runtime.start("speech", "MarySpeech");
    ((MarySpeech) speech).startService();
    // attach speech to ear -
    // auto subscribes to "request confirmation"
    // so that speech asks for confirmation
    // TODO - put this in gui so state will be updated with text
    // question
    ear.addMouth(speech);
    Log log = (Log) Runtime.createAndStart("log", "Log");
    Clock clock = (Clock) Runtime.createAndStart("clock", "Clock");
    // TODO - got to do this - it will be KICKASS !
    // log.subscribe(outMethod, publisherName, inMethod, parameterType)
    // new MRLListener("pulse", log.getName(), "log");
    ear.addCommand("log", log.getName(), "log");
    ear.addCommand("log subscribe to clock", log.getName(), "subscribe", new Object[] { "pulse", });
    ear.addCommand("start clock", clock.getName(), "startClock");
    ear.addCommand("stop clock", clock.getName(), "stopClock");
    ear.addCommand("set clock interval to five seconds", clock.getName(), "setInterval", 5000);
    ear.addCommand("set clock interval to ten seconds", clock.getName(), "setInterval", 10000);
    ear.addComfirmations("yes", "correct", "right", "yeah", "ya");
    ear.addNegations("no", "incorrect", "wrong", "nope", "nah");
    ear.startListening();
    // ear.startListening("camera on | camera off | arm left | arm right |
    // hand left | hand right ");
    // ear.startListening("yes | no");
    // Sphinx ear = new Sphinx("ear");
    // ear.createGrammar("hello | up | down | yes | no");
    // ear.startService();
  } catch (Exception e) {
    Logging.logError(e);
  }
}
/**
 * @param n  service instance name
 * @param id runtime/process id
 */
public Sphinx(String n, String id) {
  super(n, id);
}
/**
 * Registers bypass phrases: commands matching these skip the
 * confirmation/negation step and are executed immediately.
 */
public void addBypass(String... txt) {
  if (bypass == null) {
    bypass = new HashMap<String, Message>();
  }
  final Message bypassCommand = Message.createMessage(getName(), getName(), "bypass", null);
  for (String phrase : txt) {
    bypass.put(phrase, bypassCommand);
  }
}
/**
 * Registers confirmation phrases (e.g. "yes", "correct"): once any are set,
 * recognized commands are held until one of these phrases confirms them.
 * The method name keeps its historical misspelling for backward compatibility;
 * prefer {@link #addConfirmations(String...)}.
 */
public void addComfirmations(String... txt) {
  if (confirmations == null) {
    confirmations = new HashMap<String, Message>();
  }
  Message confirmCommand = Message.createMessage(getName(), getName(), "confirmation", null);
  for (String phrase : txt) {
    confirmations.put(phrase, confirmCommand);
  }
}

/** Correctly spelled alias for {@link #addComfirmations(String...)}. */
public void addConfirmations(String... txt) {
  addComfirmations(txt);
}
/**
 * Registers negation phrases (e.g. "no", "wrong"): hearing one cancels the
 * command currently awaiting confirmation.
 */
public void addNegations(String... txt) {
  if (negations == null) {
    negations = new HashMap<String, Message>();
  }
  final Message negationCommand = Message.createMessage(getName(), getName(), "negation", null);
  for (String phrase : txt) {
    negations.put(phrase, negationCommand);
  }
}
/** Routes recognized text ({@code publishText}) to the given listener's {@code onText} method. */
public void addTextListener(TextListener service) {
  addListener("publishText", service.getName(), "onText");
}
/** Routes recognition events to the given service's {@code heard} method. */
public void addVoiceRecognitionListener(Service s) {
  // TODO - reflect on a public heard method - if doesn't exist error ?
  this.addListener("recognized", s.getName(), "heard");
}
// TODO - make "Speech" interface if desired
// public boolean attach(SpeechSynthesis mouth) {
// if (mouth == null) {
// warn("can not attach mouth is null");
// return false;
// }
// // if I'm speaking - I shouldn't be listening
// mouth.addEar(this);
// this.addListener("publishText", mouth.getName(), "onText");
// this.addListener("publishRequestConfirmation", mouth.getName(),
// "onRequestConfirmation");
// log.info(String.format("attached Speech service %s to Sphinx service %s
// with default message routes", mouth.getName(), getName()));
// return true;
// }
/**
 * Appends all phrases of the given command map to the grammar buffer,
 * '|'-separated, prefixing a '|' when the buffer already has content.
 * A null map contributes nothing.
 */
public void buildGrammar(StringBuffer sb, HashMap<String, Message> cmds) {
  if (cmds == null) {
    return;
  }
  if (sb.length() > 0) {
    sb.append("|");
  }
  sb.append(String.join("|", cmds.keySet()));
}
/*
* public void publishRecognized(String recognizedText) { invoke("recognized",
* recognizedText); }
*/
/** Removes all lock phrases, restoring recognition of the full grammar. */
public void clearLock() {
  lockPhrases.clear();
}
/**
 * createGrammar must be called before the Service starts if a new grammar is
 * needed. Writes the per-service Sphinx configuration ({@code <name>.xml})
 * and JSGF grammar ({@code <name>.gram}) into the MRL config directory.
 *
 * example: Sphinx.createGrammar ("ear", "stop | go | left | right | back");
 * ear = Runtime.create("ear", "Sphinx")
 *
 * @param grammar
 *          - grammar content, phrases separated by '|'
 * @return true/false - false when the files could not be written
 */
public boolean createGrammar(String grammar) {
  log.info("creating grammar [{}]", grammar);
  // FIXME - probably broken
  // get base simple.xml file - and modify it to
  // point to the correct .gram file
  String simplexml = getResourceAsString("simple.xml");
  simplexml = simplexml.replaceAll("resource:/resource/", FileIO.getCfgDir());
  // Sphinx extracts the grammar name from the xml and appends ".gram" itself,
  // so dots in the service name are flattened to underscores.
  // (The old `if (grammarFileName.contains("."))` truncation branch was
  // removed: it was unreachable, since all dots were just replaced.)
  String grammarFileName = getName().replaceAll("\\.", "_");
  simplexml = simplexml.replaceAll("name=\"grammarName\" value=\"simple\"", "name=\"grammarName\" value=\"" + grammarFileName + "\"");
  try {
    FileIO.toFile(String.format("%s%s%s.%s", FileIO.getCfgDir(), File.separator, grammarFileName, "xml"), simplexml);
    // save("xml", simplexml);
    String gramdef = "#JSGF V1.0;\n" + "grammar " + grammarFileName + ";\n" + "public <greet> = (" + grammar + ");";
    FileIO.toFile(String.format("%s%s%s.%s", FileIO.getCfgDir(), File.separator, grammarFileName, "gram"), gramdef);
  } catch (Exception e) {
    Logging.logError(e);
    return false;
  }
  // save("gram", gramdef);
  return true;
}
/**
 * True when the microphone is currently recording.
 * NOTE(review): throws NPE if called before startListening() has created the
 * microphone - confirm callers guard against this.
 */
public boolean isRecording() {
  return microphone.isRecording();
}
/**
 * Inbound port for speaking services (TTS): while the robot is talking,
 * recognition is suppressed so the system does not listen to its own speech -
 * otherwise a feedback loop can occur.
 *
 * @return the {@code talking} flag, unchanged
 */
public synchronized boolean onIsSpeaking(Boolean talking) {
  if (talking) {
    log.info("I'm talking so I'm not listening"); // Gawd, ain't that
    // the truth !
    isListening = false;
  } else {
    log.info("I'm not talking so I'm listening"); // mebbe
    isListening = true;
  }
  return talking;
}
/**
 * Event is sent when the listening Service is actually listening. There is
 * some delay when it initially loads. This implementation ignores the event.
 */
@Override
public void listeningEvent(Boolean event) {
  return;
}
/*
* FIXME - the trunk is broke - the configuration is horrible find a way to
* make this work, despite Sphinx's chaos !
*
* function to swap grammars to allow sphinx a little more capability
* regarding "new words"
*
* check http://cmusphinx.sourceforge.net/wiki/sphinx4:swappinggrammars
*
* @throws PropertyException
*/
/*
* FIXME SPHINX IS A MESS IT CAN"T DO THIS ALTHOUGH DOCUMENTATION SAYS IT CAN
* void swapGrammar(String newGrammarName) throws PropertyException,
* InstantiationException, IOException { log.debug("Swapping to grammar " +
* newGrammarName); Linguist linguist = (Linguist) cm.lookup("flatLinguist");
* linguist.deallocate(); // TODO - bundle sphinx4-1.0beta6 //
* cm.setProperty("jsgfGrammar", "grammarName", newGrammarName);
*
* linguist.allocate(); }
*/
/** Adds a lock phrase: once the lock set is non-empty, only lock phrases (and confirmations) are acted upon. */
public void lockOutAllGrammarExcept(String lockPhrase) {
  this.lockPhrases.add(lockPhrase);
}
/*
* deprecated public void onCommand(String command, String targetName, String
* targetMethod, Object... data) { Message msg = new Message(); msg.name =
* targetName; msg.method = targetMethod; msg.data = data;
*
* commandMap.put(command, msg); }
*/
/**
 * Suppresses recognition listening events. This is important when Sphinx is
 * listening and then speaking - typically you don't want Sphinx to listen to
 * its own speech, which causes a feedback loop; with Sphinx not very accurate,
 * this leads to weirdness. It does not recreate the speech processor, so it is
 * not as heavy-handed as stopListening().
 */
@Override
public synchronized void pauseListening() {
  log.info("Pausing Listening");
  isListening = false;
  if (microphone != null && recognizer != null) {
    // TODO: what does reset monitors do? maybe clear the microphone?
    // maybe neither of these do anything useful
    microphone.stopRecording();
    // microphone.clear();
    // recognizer.resetMonitors();
  }
}
/** Text output of this service: returns the recognized text unchanged for subscribers. */
@Override
public String publishText(String recognizedText) {
  return recognizedText;
}
/**
 * The main output for this service - fired for every recognized phrase.
 *
 * @return the word, unchanged
 */
@Override
public String recognized(String word) {
  return word;
}
/**
 * Publishing point used to ask an attached mouth/UI to confirm a command
 * before it is executed. Pass-through: the framework routes the returned
 * text to listeners registered on "publishRequestConfirmation".
 *
 * @param txt the confirmation prompt to publish
 * @return the prompt, unchanged
 */
public String publishRequestConfirmation(String txt) {
return txt;
}
/**
 * Re-enables recognition after {@link #pauseListening()}: drops any audio
 * buffered while paused, then restarts microphone capture.
 */
@Override
public void resumeListening() {
log.info("resuming listening");
isListening = true;
if (microphone != null) {
// TODO: no idea if this does anything useful.
microphone.clear();
microphone.startRecording();
}
}
// FYI - grammar must be created BEFORE we start to listen
/**
 * Starts listening using only the grammar built from the already-registered
 * commands/confirmations/negations (no additional grammar entries).
 */
@Override
public void startListening() {
startListening(null); // use existing grammar
}
// FIXME - re-entrant - make it create new speechProcessor
/**
 * Builds a pipe-separated grammar from every registered command,
 * confirmation, negation and bypass phrase (plus the optional extra grammar
 * string), installs it, and launches the background speech processor.
 * Does nothing if a processor is already running.
 *
 * @param grammar extra pipe-separated phrases, or null to use only the
 *                registered entries
 */
public void startListening(String grammar) {
    if (speechProcessor != null) {
        log.warn("already listening");
        return;
    }
    StringBuffer combined = new StringBuffer();
    buildGrammar(combined, commands);
    buildGrammar(combined, confirmations);
    buildGrammar(combined, negations);
    buildGrammar(combined, bypass);
    if (grammar != null) {
        // separate the extra entries from the built-in ones unless empty
        String separator = combined.length() > 0 ? "|" : "";
        combined.append(separator).append(cleanGrammar(grammar));
    }
    createGrammar(combined.toString());
    speechProcessor = new SpeechProcessor(this);
    speechProcessor.start();
}
/**
 * Strips punctuation that gives Sphinx trouble inside a grammar: commas and
 * periods are replaced with spaces.
 *
 * @param grammar raw grammar text
 * @return the sanitized grammar text
 */
private String cleanGrammar(String grammar) {
    return grammar.replaceAll("[\\.\\,]", " ");
}
/**
 * Clears buffered audio and starts microphone capture directly, bypassing
 * the isListening/speechProcessor bookkeeping. NOTE(review): the trailing
 * "x" suggests an experimental/parked variant -- confirm before relying on
 * it.
 */
public void startRecordingx() {
microphone.clear();
microphone.startRecording();
}
/**
 * Fully stops listening: halts and clears the microphone, flags the service
 * as not listening, and shuts down the background speech processor.
 * Historical note: stopRecording does "work", but recognition seemed to
 * degrade after a subsequent startRecording, so temporary suppression is
 * done via {@link #pauseListening()} (which merely ignores results) rather
 * than this full teardown.
 */
@Override
public void stopListening() {
if (microphone != null) {
microphone.stopRecording();
microphone.clear();
}
isListening = false;
if (speechProcessor != null) {
speechProcessor.isRunning = false;
}
speechProcessor = null;
}
/**
 * Service shutdown: stops listening first, then releases the recognizer and
 * microphone. Order matters -- listening must be torn down before the
 * recognizer/microphone it depends on are deallocated.
 */
@Override
public void stopService() {
super.stopService();
stopListening();
if (recognizer != null) {
recognizer.deallocate();
recognizer = null;
}
if (microphone != null) {
microphone.stopRecording();
microphone = null;
}
}
/**
 * Attaches a speech-synthesis service ("mouth") to this ear and wires the
 * default message routes: recognized text goes to the mouth's onText, and
 * both confirmation publish points go to onRequestConfirmation. The mouth
 * is also given a reference back to this ear so listening can be paused
 * while it speaks.
 *
 * @param mouth the speech service to attach; ignored (with a warning) if null
 */
@Override
public void addMouth(SpeechSynthesis mouth) {
    if (mouth == null) {
        warn("can not attach mouth is null");
        return;
    }
    // if I'm speaking - I shouldn't be listening
    mouth.addEar(this);
    addListener("publishText", mouth.getName(), "onText");
    addListener("publishRequestConfirmation", mouth.getName(), "onRequestConfirmation");
    addListener("requestConfirmation", mouth.getName(), "onRequestConfirmation");
    log.info("attached Speech service {} to Sphinx service {} with default message routes", mouth.getName(), getName());
}
/**
 * Advertises en-US as the only locale this service supports.
 *
 * @return a locale map containing just "en-US"
 */
@Override
public Map<String, Locale> getLocales() {
return Locale.getLocaleMap("en-US");
}
/**
 * Routes this service's "publishText" output to the named service.
 *
 * @param name the service that should receive recognized text
 */
@Override
public void attachTextListener(String name) {
addListener("publishText", name);
}
}
| |
package com.readytalk.crdt.sets;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.readytalk.crdt.util.CollectionUtils.checkCollectionDoesNotContainNull;
import java.io.IOException;
import java.util.Collection;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import javax.annotation.Nullable;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterators;
import com.google.common.collect.LinkedHashMultimap;
import com.google.common.collect.Maps;
import com.google.common.collect.Multimap;
import com.google.common.collect.Multimaps;
import com.google.common.collect.Sets;
import com.readytalk.crdt.AbstractCRDT;
/**
 * Observed-Remove Set (OR-Set) CRDT. Every add is tagged with a fresh UUID;
 * a remove tombstones exactly the tags that were observed at removal time,
 * so a concurrent add (carrying a new tag) wins over a concurrent remove.
 * An element is a member while it has at least one non-tombstoned tag.
 */
public class ORSet<E> extends AbstractCRDT<ImmutableSet<E>, ORSet<E>> implements
        CRDTSet<E, ImmutableSet<E>, ORSet<E>> {

    private static final String ELEMENTS_TOKEN = "e";
    private static final String TOMBSTONES_TOKEN = "t";

    // live element -> set of unique add-tags
    private final Multimap<E, UUID> elements = LinkedHashMultimap.create();
    // removed element -> tags that had been observed when it was removed
    private final Multimap<E, UUID> tombstones = LinkedHashMultimap.create();

    public ORSet(final ObjectMapper mapper) {
        super(mapper);
    }

    /**
     * Reconstructs a set from the JSON payload produced by {@link #payload()}.
     *
     * @throws IllegalArgumentException if the bytes cannot be deserialized
     */
    public ORSet(final ObjectMapper mapper, final byte[] value) {
        super(mapper);
        TypeReference<Map<String, Map<E, Collection<UUID>>>> ref =
                new TypeReference<Map<String, Map<E, Collection<UUID>>>>() {
                };
        try {
            Map<String, Map<E, Collection<UUID>>> s1 = mapper.readValue(value,
                    ref);
            Map<E, Collection<UUID>> e = s1.get(ELEMENTS_TOKEN);
            Map<E, Collection<UUID>> t = s1.get(TOMBSTONES_TOKEN);
            for (Map.Entry<E, Collection<UUID>> o : e.entrySet()) {
                elements.putAll(o.getKey(), o.getValue());
            }
            for (Map.Entry<E, Collection<UUID>> o : t.entrySet()) {
                tombstones.putAll(o.getKey(), o.getValue());
            }
        } catch (IOException ex) {
            throw new IllegalArgumentException("Unable to deserialize.", ex);
        }
    }

    /**
     * Adds the value under a fresh unique tag.
     *
     * @return true if the value was not already a member
     */
    @Override
    public boolean add(final E value) {
        checkNotNull(value);
        UUID uuid = UUID.randomUUID();
        boolean retval = !elements.containsKey(value);
        elements.put(value, uuid);
        return retval;
    }

    /** Adds every value; returns true if any of them was newly added. */
    @Override
    public boolean addAll(final Collection<? extends E> values) {
        checkNotNull(values);
        checkCollectionDoesNotContainNull(values);
        boolean retval = false;
        for (E o : values) {
            retval |= this.add(o);
        }
        return retval;
    }

    /** Tombstones every currently-observed tag and empties the element map. */
    @Override
    public void clear() {
        this.tombstones.putAll(this.elements);
        this.elements.clear();
    }

    @Override
    public boolean contains(final Object value) {
        checkNotNull(value);
        return this.elements.containsKey(value);
    }

    @Override
    public boolean containsAll(final Collection<?> values) {
        checkCollectionDoesNotContainNull(values);
        return this.value().containsAll(values);
    }

    @Override
    public boolean isEmpty() {
        return elements.isEmpty();
    }

    @Override
    public Iterator<E> iterator() {
        return Iterators
                .unmodifiableIterator(this.elements.keySet().iterator());
    }

    /**
     * Removes the value, tombstoning all of its currently-observed tags.
     *
     * @return true if the value had been a member
     */
    @SuppressWarnings("unchecked")
    @Override
    public boolean remove(final Object value) {
        checkNotNull(value);
        this.tombstones.putAll((E) value, elements.get((E) value));
        return elements.removeAll(value).size() > 0;
    }

    /** Removes every listed value; returns true if anything was removed. */
    @Override
    public boolean removeAll(final Collection<?> values) {
        checkNotNull(values);
        checkCollectionDoesNotContainNull(values);
        Multimap<E, UUID> subset = Multimaps.filterKeys(elements,
                new Predicate<E>() {
                    @Override
                    public boolean apply(final E input) {
                        return values.contains(input);
                    }
                });
        if (subset.isEmpty()) {
            return false;
        }
        // copy the keys first: removing from `elements` mutates the filtered view
        for (E o : Sets.newLinkedHashSet(subset.keySet())) {
            Collection<UUID> result = this.elements.removeAll(o);
            this.tombstones.putAll(o, result);
        }
        return true;
    }

    /** Keeps only the listed values; implemented as removeAll of the difference. */
    @Override
    @SuppressWarnings("unchecked")
    public boolean retainAll(final Collection<?> values) {
        checkNotNull(values);
        checkCollectionDoesNotContainNull(values);
        Set<E> input = Sets.newHashSet((Collection<E>) values);
        Set<E> diff = Sets.difference(this.elements.keySet(), input);
        return this.removeAll(diff);
    }

    @Override
    public int size() {
        return elements.keySet().size();
    }

    @Override
    public Object[] toArray() {
        return elements.keySet().toArray();
    }

    @Override
    public <T> T[] toArray(final T[] arg) {
        return elements.keySet().toArray(arg);
    }

    /**
     * Merges two replicas: union of element tags, union of tombstones, then
     * every tombstoned (element, tag) pair is dropped from the live elements.
     */
    @Override
    public ORSet<E> merge(final ORSet<E> other) {
        ORSet<E> retval = new ORSet<E>(serializer());
        retval.elements.putAll(this.elements);
        retval.elements.putAll(other.elements);
        retval.tombstones.putAll(this.tombstones);
        // BUG FIX: previously merged other.elements into the tombstones,
        // which tombstoned every live entry of the other replica.
        retval.tombstones.putAll(other.tombstones);
        // BUG FIX: Multimap.removeAll(Object) treats its argument as a single
        // key, so the old call removeAll(retval.tombstones) removed nothing.
        // Remove each tombstoned (element, tag) pair individually instead.
        for (Map.Entry<E, UUID> entry : retval.tombstones.entries()) {
            retval.elements.remove(entry.getKey(), entry.getValue());
        }
        return retval;
    }

    /** @return the current membership as an immutable snapshot */
    @Override
    public ImmutableSet<E> value() {
        return ImmutableSet.copyOf(elements.keySet());
    }

    /**
     * Serializes both element tags and tombstones to JSON.
     *
     * @throws IllegalStateException if serialization fails
     */
    @Override
    public byte[] payload() {
        Map<String, Object> retval = Maps.newLinkedHashMap();
        retval.put(ELEMENTS_TOKEN, elements.asMap());
        retval.put(TOMBSTONES_TOKEN, tombstones.asMap());
        try {
            return serializer().writeValueAsBytes(retval);
        } catch (IOException ex) {
            throw new IllegalStateException("Unable to serialize object.", ex);
        }
    }

    /** Equality is by observable membership, not by internal tags. */
    @Override
    public final boolean equals(@Nullable final Object o) {
        if (!(o instanceof ORSet)) {
            return false;
        }
        ORSet<?> t = (ORSet<?>) o;
        if (this == t) {
            return true;
        } else {
            return this.value().equals(t.value());
        }
    }

    @Override
    public final int hashCode() {
        return this.value().hashCode();
    }

    @Override
    public String toString() {
        return this.value().toString();
    }
}
| |
/*
* Copyright 2014 MovingBlocks
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.terasology.logic.console.ui;
import com.google.common.base.Function;
import com.google.common.collect.Collections2;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import org.terasology.entitySystem.entity.EntityRef;
import org.terasology.logic.console.Console;
import org.terasology.logic.console.ConsoleColors;
import org.terasology.logic.console.CoreMessageType;
import org.terasology.logic.console.Message;
import org.terasology.logic.console.commandSystem.ConsoleCommand;
import org.terasology.logic.console.commandSystem.exceptions.CommandSuggestionException;
import org.terasology.logic.players.LocalPlayer;
import org.terasology.naming.Name;
import org.terasology.rendering.FontColor;
import org.terasology.utilities.CamelCaseMatcher;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Set;
/**
 * A text completion engine with cycle-through functionality: repeated
 * completion requests on the same query cycle through the sorted match list,
 * highlighting the current selection in the console.
 *
 * @author Martin Steiger, Limeth
 */
public class CyclingTabCompletionEngine implements TabCompletionEngine {
    private final Console console;
    private int selectionIndex;
    private List<String> previousMatches; // alphabetically ordered list of matches
    private Message previousMessage;
    private Collection<String> commandNames;
    private String query;
    private LocalPlayer localPlayer;

    public CyclingTabCompletionEngine(Console console, LocalPlayer localPlayer) {
        this.console = console;
        this.localPlayer = localPlayer;
    }

    /**
     * Rebuilds the cached command-name view when the number of registered
     * commands has changed since the last call.
     *
     * @return true if the cache was rebuilt
     */
    private boolean updateCommandNamesIfNecessary() {
        Collection<ConsoleCommand> commands = console.getCommands();
        if (commandNames != null && commandNames.size() == commands.size()) {
            return false;
        }
        commandNames = Collections2.transform(commands, new Function<ConsoleCommand, String>() {
            @Override
            public String apply(ConsoleCommand input) {
                return input.getName().toString();
            }
        });
        return true;
    }

    /**
     * Finds completion candidates. A suggestedIndex of 0 means the command
     * name itself is being completed (camel-case matched against all known
     * commands); otherwise the parameter at suggestedIndex is completed via
     * the command's own suggester.
     *
     * @return the candidate set, or null if nothing can be suggested
     */
    private Set<String> findMatches(Name commandName, List<String> commandParameters,
                                    ConsoleCommand command, int suggestedIndex) {
        if (suggestedIndex <= 0) {
            updateCommandNamesIfNecessary();
            return CamelCaseMatcher.getMatches(commandName.toString(), commandNames, true);
        } else if (command == null) {
            return null;
        }
        List<String> finishedParameters = Lists.newArrayList();
        for (int i = 0; i < suggestedIndex - 1; i++) {
            finishedParameters.add(commandParameters.get(i));
        }
        String currentValue = commandParameters.size() >= suggestedIndex ? commandParameters.get(suggestedIndex - 1) : null;
        EntityRef sender = localPlayer.getClientEntity();
        try {
            return command.suggest(currentValue, finishedParameters, sender);
        } catch (CommandSuggestionException e) {
            String causeMessage = e.getLocalizedMessage();
            if (causeMessage == null) {
                Throwable cause = e.getCause();
                // BUG FIX: the cause may be absent; previously this threw an
                // NPE on cause.getLocalizedMessage() instead of reporting.
                if (cause == null) {
                    causeMessage = e.toString();
                } else {
                    causeMessage = cause.getLocalizedMessage();
                    if (causeMessage == null || causeMessage.isEmpty()) {
                        causeMessage = cause.toString();
                        if (causeMessage == null || causeMessage.isEmpty()) {
                            return null;
                        }
                    }
                }
            }
            console.addMessage("Error when suggesting command: " + causeMessage, CoreMessageType.ERROR);
            return null;
        }
    }

    /**
     * Completes the given input. On a fresh query the match set is computed
     * and displayed; on repeated calls with the same matches the selection
     * cycles through them.
     *
     * @return the completed command line, the unchanged query when no match
     *         exists, or null for empty input
     */
    @Override
    public String complete(String rawCommand) {
        if (rawCommand.length() <= 0) {
            reset();
            previousMessage = new Message("Type 'help' to list all commands.");
            console.addMessage(previousMessage);
            return null;
        } else if (query == null) {
            query = rawCommand;
        }
        String commandNameRaw = console.processCommandName(query);
        Name commandName = new Name(commandNameRaw);
        List<String> commandParameters = console.processParameters(query);
        ConsoleCommand command = console.getCommand(commandName);
        // a trailing space means the user started the next parameter
        int suggestedIndex = commandParameters.size() + (query.charAt(query.length() - 1) == ' ' ? 1 : 0);
        Set<String> matches = findMatches(commandName, commandParameters, command, suggestedIndex);
        if (matches == null || matches.size() <= 0) {
            return query;
        }
        if (previousMatches == null || !matches.equals(Sets.newHashSet(previousMatches))) {
            reset(false);
            if (matches.size() == 1) {
                return generateResult(matches.iterator().next(), commandName, commandParameters, suggestedIndex);
            }
/*            if (matches.length > MAX_CYCLES) {
                console.addMessage(new Message("Too many hits, please refine your search"));
                return query;
            }*/ //TODO Find out a better way to handle too many results while returning useful information
            previousMatches = Lists.newArrayList(matches);
            Collections.sort(previousMatches);
        }
        StringBuilder matchMessageString = new StringBuilder();
        for (int i = 0; i < previousMatches.size(); i++) {
            if (i > 0) {
                matchMessageString.append(' ');
            }
            String match = previousMatches.get(i);
            if (selectionIndex == i) {
                // highlight the match that will be inserted this cycle
                match = FontColor.getColored(match, ConsoleColors.COMMAND);
            }
            matchMessageString.append(match);
        }
        Message matchMessage = new Message(matchMessageString.toString());
        String suggestion = previousMatches.get(selectionIndex);
        if (previousMessage != null) {
            console.replaceMessage(previousMessage, matchMessage);
        } else {
            console.addMessage(matchMessage);
        }
        previousMessage = matchMessage;
        selectionIndex = (selectionIndex + 1) % previousMatches.size();
        return generateResult(suggestion, commandName, commandParameters, suggestedIndex);
    }

    /**
     * Rebuilds the command line with the suggestion substituted in: either
     * the command name itself (suggestedIndex <= 0) or the parameter at
     * suggestedIndex appended after the already-finished parameters.
     */
    private String generateResult(String suggestion, Name commandName,
                                  List<String> commandParameters, int suggestedIndex) {
        if (suggestedIndex <= 0) {
            return suggestion;
        } else {
            StringBuilder result = new StringBuilder();
            result.append(commandName.toString());
            for (int i = 0; i < suggestedIndex - 1; i++) {
                result.append(" ");
                result.append(commandParameters.get(i));
            }
            result.append(" ");
            result.append(suggestion);
            return result.toString();
        }
    }

    /**
     * Clears the cycling state (displayed message, match list, selection).
     *
     * @param removeQuery whether the remembered query is discarded as well
     */
    private void reset(boolean removeQuery) {
        if (previousMessage != null) {
            console.removeMessage(previousMessage);
        }
        if (removeQuery) {
            query = null;
        }
        previousMessage = null;
        previousMatches = null;
        selectionIndex = 0;
    }

    @Override
    public void reset() {
        reset(true);
    }
}
| |
package com.daedafusion.knowledge.ontology.model;
import com.daedafusion.sparql.Literal;
import com.hp.hpl.jena.query.*;
import com.hp.hpl.jena.rdf.model.*;
import org.apache.log4j.Logger;
import java.io.InputStream;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
/**
 * Created by mphilpot on 7/14/14.
 *
 * Read-side helper over a Jena {@link Model}: loads an ontology and answers
 * structural queries (classes, properties, restrictions, labels) via SPARQL.
 * All query strings are prefixed with the rdf/rdfs/xsd/owl namespaces.
 */
public class OntologyModel
{
    private static final Logger log = Logger.getLogger(OntologyModel.class);

    private Model model;
    // prefix name -> "<namespace-uri>" (angle brackets included, ready for SPARQL)
    private Map<String, String> prefixes;

    public OntologyModel()
    {
        model = ModelFactory.createDefaultModel();
        // model = ModelFactory.createRDFSModel(ModelFactory.createDefaultModel());
        prefixes = new HashMap<>();
        prefixes.put("rdf", "<http://www.w3.org/1999/02/22-rdf-syntax-ns#>");
        prefixes.put("rdfs", "<http://www.w3.org/2000/01/rdf-schema#>");
        prefixes.put("xsd", "<http://www.w3.org/2001/XMLSchema#>");
        prefixes.put("owl", "<http://www.w3.org/2002/07/owl#>");
    }

    public OntologyModel(Model model)
    {
        this();
        this.model = model;
    }

    public Model getModel()
    {
        return model;
    }

    /**
     * Reads RDF into the model.
     *
     * @param in   the serialized ontology
     * @param type the Jena serialization language name (e.g. "RDF/XML")
     */
    public void load(InputStream in, String type)
    {
        model.read(in, null, type);
    }

    /** @return URIs of all named (non-anonymous) owl:Class subjects */
    public Set<String> getClasses()
    {
        Set<String> classes = new HashSet<>();
        String sparql = String.format("%s construct where { ?s rdf:type owl:Class }", buildPrefixes());
        Query query = QueryFactory.create(sparql);
        try(QueryExecution qe = QueryExecutionFactory.create(query, model))
        {
            Model results = qe.execConstruct();
            for (Statement s : results.listStatements().toSet())
            {
                if(!s.getSubject().isAnon())
                {
                    classes.add(s.getSubject().toString());
                }
            }
        }
        return classes;
    }

    /**
     * Returns the transitive closure of named rdfs:subClassOf parents of the
     * given class (rdfs:Resource is excluded from the recursion).
     */
    public Set<String> getParentClasses(String uri)
    {
        Set<String> parents = new HashSet<>();
        String sparql = String.format("%s construct where { <%s> rdfs:subClassOf ?o }", buildPrefixes(), uri);
        Query query = QueryFactory.create(sparql);
        try(QueryExecution qe = QueryExecutionFactory.create(query, model))
        {
            Model results = qe.execConstruct();
            parents.addAll(results.listStatements().toSet()
                    .stream()
                    .filter(s -> !s.getObject().isAnon())
                    .map(s -> s.getObject().toString())
                    .collect(Collectors.toList()));
            // recurse to collect grandparents; guard against trivial cycles
            Set<String> parentClosure = new HashSet<>();
            parents.stream()
                    .filter(parent -> !parent.equals("http://www.w3.org/2000/01/rdf-schema#Resource") && !parent.equals(uri))
                    .forEach(parent -> parentClosure.addAll(getParentClasses(parent)));
            parents.addAll(parentClosure);
        }
        return parents;
    }

    /**
     * Returns datatype/plain property URIs whose rdfs:domain is the given
     * class or any of its ancestors (inherited properties included).
     */
    public Set<String> getDataProperties(String uri)
    {
        Set<String> dps = new HashSet<>();
        String sparql = String.format("%s select ?s where { " +
                        "{ ?s rdf:type owl:DatatypeProperty . ?s rdfs:domain <%s> } UNION { ?s rdf:type rdf:Property . ?s rdfs:domain <%s> } }",
                buildPrefixes(),
                uri, uri);
        Query query = QueryFactory.create(sparql);
        try(QueryExecution qe = QueryExecutionFactory.create(query, model))
        {
            ResultSet results = qe.execSelect();
            while(results.hasNext())
            {
                QuerySolution sol = results.nextSolution();
                Resource resource = sol.getResource("s");
                dps.add(resource.toString());
            }
            // inherit data properties declared on ancestor classes
            Set<String> parents = getParentClasses(uri);
            parents.stream()
                    .filter(parent -> !parent.equals("http://www.w3.org/2000/01/rdf-schema#Resource") && !parent.equals(uri))
                    .forEach(parent -> dps.addAll(getDataProperties(parent)));
        }
        return dps;
    }

    /** @return URIs of every owl:DatatypeProperty or rdf:Property in the model */
    public Set<String> getDataProperties()
    {
        Set<String> dps = new HashSet<>();
        String sparql = String.format("%s select ?s where { " +
                        "{ ?s rdf:type owl:DatatypeProperty } UNION { ?s rdf:type rdf:Property } }",
                buildPrefixes());
        Query query = QueryFactory.create(sparql);
        try(QueryExecution qe = QueryExecutionFactory.create(query, model))
        {
            ResultSet results = qe.execSelect();
            while(results.hasNext())
            {
                QuerySolution sol = results.nextSolution();
                Resource resource = sol.getResource("s");
                dps.add(resource.toString());
            }
        }
        return dps;
    }

    /**
     * Returns object-property URIs whose rdfs:domain is the given class or
     * any of its ancestors.
     */
    public Set<String> getObjectProperties(String uri)
    {
        Set<String> ops = new HashSet<>();
        String sparql = String.format("%s construct where { ?s rdf:type owl:ObjectProperty . ?s rdfs:domain <%s> }",
                buildPrefixes(), uri);
        Query query = QueryFactory.create(sparql);
        try(QueryExecution qe = QueryExecutionFactory.create(query, model))
        {
            Model results = qe.execConstruct();
            ops.addAll(results.listStatements().toSet()
                    .stream()
                    .map(s -> s.getSubject().toString())
                    .collect(Collectors.toList()));
            // inherit object properties declared on ancestor classes
            Set<String> parentClosure = new HashSet<>();
            for(String parent : getParentClasses(uri))
            {
                if(!parent.equals("http://www.w3.org/2000/01/rdf-schema#Resource") && !parent.equals(uri))
                    parentClosure.addAll(getObjectProperties(parent));
            }
            ops.addAll(parentClosure);
        }
        return ops;
    }

    /** @return URIs of every owl:ObjectProperty in the model */
    public Set<String> getObjectProperties()
    {
        Set<String> ops = new HashSet<>();
        String sparql = String.format("%s construct where { ?s rdf:type owl:ObjectProperty }",
                buildPrefixes());
        Query query = QueryFactory.create(sparql);
        try(QueryExecution qe = QueryExecutionFactory.create(query, model))
        {
            Model results = qe.execConstruct();
            ops.addAll(results.listStatements().toSet()
                    .stream()
                    .map(s -> s.getSubject().toString())
                    .collect(Collectors.toList()));
        }
        return ops;
    }

    /** @return URIs of all subjects declared with rdf:type of the given type */
    public Set<String> getObjectsOfType(String rdfType)
    {
        Set<String> objects = new HashSet<>();
        String sparql = String.format("%s construct where { ?s rdf:type <%s> }", buildPrefixes(), rdfType);
        Query query = QueryFactory.create(sparql);
        try(QueryExecution qe = QueryExecutionFactory.create(query, model))
        {
            Model results = qe.execConstruct();
            objects.addAll(results.listStatements().toSet()
                    .stream()
                    .map(s -> s.getSubject().toString())
                    .collect(Collectors.toList()));
        }
        return objects;
    }

    /**
     * Returns one rdf:type of the given subject, or null if none is declared.
     * When multiple types exist an arbitrary one is returned (with a warning).
     */
    public String getTypeOfObject(String uri)
    {
        Set<String> rdfTypes = new HashSet<>();
        String sparql = String.format("%s construct where { <%s> rdf:type ?o }", buildPrefixes(), uri);
        Query query = QueryFactory.create(sparql);
        try(QueryExecution qe = QueryExecutionFactory.create(query, model))
        {
            Model results = qe.execConstruct();
            rdfTypes.addAll(results.listStatements().toSet()
                    .stream()
                    .map(s -> s.getObject().toString())
                    .collect(Collectors.toList()));
        }
        if(rdfTypes.isEmpty())
        {
            return null;
        }
        if(rdfTypes.size() > 1)
        {
            log.warn(String.format("Found more than one rdfType for %s :: %s", uri, rdfTypes));
        }
        return rdfTypes.iterator().next();
    }

    /** @return URIs of subjects carrying the given (annotation) property */
    public Set<String> getObjectsWithAnnotationProperty(String prop)
    {
        Set<String> objects = new HashSet<>();
        String sparql = String.format("%s construct where { ?s <%s> ?o }", buildPrefixes(), prop);
        Query query = QueryFactory.create(sparql);
        try(QueryExecution qe = QueryExecutionFactory.create(query, model))
        {
            Model results = qe.execConstruct();
            objects.addAll(results.listStatements().toSet()
                    .stream()
                    .map(s -> s.getSubject().toString())
                    .collect(Collectors.toList()));
        }
        return objects;
    }

    /**
     * Returns URIs of subjects whose given property has exactly the given
     * plain-literal value. NOTE(review): `value` is interpolated unescaped
     * into the query string -- confirm callers never pass quote characters.
     */
    public Set<String> getObjectsWithAnnotationPropertyValue(String prop, String value)
    {
        //validateLiteral(value);
        Set<String> objects = new HashSet<>();
        String sparql = String.format("%s construct where { ?s <%s> \"%s\" }", buildPrefixes(), prop, value);
        Query query = QueryFactory.create(sparql);
        try(QueryExecution qe = QueryExecutionFactory.create(query, model))
        {
            Model results = qe.execConstruct();
            objects.addAll(results.listStatements().toSet()
                    .stream()
                    .map(s -> s.getSubject().toString())
                    .collect(Collectors.toList()));
        }
        return objects;
    }

    /** @return all rdfs:label values of the subject */
    public Set<Literal> getLabels(String uri)
    {
        Set<Literal> labels = new HashSet<>();
        String sparql = String.format("%s construct where { <%s> rdfs:label ?o }", buildPrefixes(), uri);
        Query query = QueryFactory.create(sparql);
        try(QueryExecution qe = QueryExecutionFactory.create(query, model))
        {
            Model results = qe.execConstruct();
            labels.addAll(results.listStatements().toSet()
                    .stream()
                    .map(s -> getLiteral(s.getObject()))
                    .collect(Collectors.toList()));
        }
        return labels;
    }

    /** @return all rdfs:comment values of the subject */
    public Set<Literal> getComments(String uri)
    {
        Set<Literal> comments = new HashSet<>();
        String sparql = String.format("%s construct where { <%s> rdfs:comment ?o }", buildPrefixes(), uri);
        Query query = QueryFactory.create(sparql);
        try(QueryExecution qe = QueryExecutionFactory.create(query, model))
        {
            Model results = qe.execConstruct();
            comments.addAll(results.listStatements().toSet()
                    .stream()
                    .map(s -> getLiteral(s.getObject()))
                    .collect(Collectors.toList()));
        }
        return comments;
    }

    /**
     * Returns the rdfs:domain class URIs of a property. Anonymous domains
     * are expanded through owl:unionOf list traversal.
     */
    public Set<String> getDomainOfProperty(String propUri)
    {
        Set<String> domains = new HashSet<>();
        String sparql = String.format("%s construct where { <%s> rdfs:domain ?o }", buildPrefixes(), propUri);
        Query query = QueryFactory.create(sparql);
        try(QueryExecution qe = QueryExecutionFactory.create(query, model))
        {
            Model results = qe.execConstruct();
            for (Statement s : results.listStatements().toSet())
            {
                if(s.getObject().isAnon())
                {
                    // anonymous domain: walk the owl:unionOf RDF list
                    String select = String.format("%s select ?member where { <%s> rdfs:domain ?o . ?o owl:unionOf ?list . ?list rdf:rest*/rdf:first ?member }",
                            buildPrefixes(), propUri);
                    Query query2 = QueryFactory.create(select);
                    try(QueryExecution qe2 = QueryExecutionFactory.create(query2, model))
                    {
                        ResultSet results2 = qe2.execSelect();
                        while(results2.hasNext())
                        {
                            QuerySolution sol = results2.nextSolution();
                            // BUG FIX: the query binds ?member; the old code
                            // read getResource("s"), which is always null and
                            // threw an NPE on toString().
                            domains.add(sol.getResource("member").toString());
                        }
                    }
                }
                else
                {
                    domains.add(s.getObject().toString());
                }
            }
        }
        return domains;
    }

    /**
     * Returns the range URIs of a property in the context of a class,
     * combining owl:Restriction values (someValuesFrom / allValuesFrom /
     * hasValue enumerations) with the property's declared rdfs:range
     * (expanding anonymous unionOf / oneOf lists).
     */
    public Set<String> getRangeOfProperty(String classUri, String propUri)
    {
        Set<String> range = new HashSet<>();
        String rdfType = getTypeOfObject(classUri);
        // BUG FIX: getTypeOfObject may return null; calling equals() on it
        // threw an NPE. NOTE(review): the original compared the type of
        // classUri (not propUri) against owl#ObjectProperty -- kept as-is,
        // confirm which URI was intended.
        boolean isObjectProperty = "http://www.w3.org/2002/07/owl#ObjectProperty".equals(rdfType);
        Map<String, Map<String, RDFNode>> tmp = getRestrictions(classUri);
        // Contains overrides
        for(Map.Entry<String, Map<String, RDFNode>> entry : tmp.entrySet())
        {
            Map<String, RDFNode> t = entry.getValue();
            if(t.containsKey("http://www.w3.org/2002/07/owl#onProperty") &&
                    t.get("http://www.w3.org/2002/07/owl#onProperty").toString().equals(propUri) &&
                    (t.containsKey("http://www.w3.org/2002/07/owl#someValuesFrom") ||
                            t.containsKey("http://www.w3.org/2002/07/owl#allValuesFrom") ||
                            t.containsKey("http://www.w3.org/2002/07/owl#hasValue")) )
            {
                String[] restrictionTypes = {
                        "http://www.w3.org/2002/07/owl#someValuesFrom",
                        "http://www.w3.org/2002/07/owl#allValuesFrom",
                        "http://www.w3.org/2002/07/owl#hasValue"
                };
                for(String rt : restrictionTypes)
                {
                    String select = String.format("%s select ?member where { <%s> rdfs:subClassOf ?o . ?o <%s> ?o2 . ?o2 owl:oneOf ?list . ?list rdf:rest*/rdf:first ?member }",
                            buildPrefixes(), propUri, rt);
                    Query query = QueryFactory.create(select);
                    try(QueryExecution qe = QueryExecutionFactory.create(query, model))
                    {
                        ResultSet results = qe.execSelect();
                        while(results.hasNext())
                        {
                            QuerySolution sol = results.nextSolution();
                            range.add(sol.getResource("member").toString());
                        }
                    }
                }
            }
        }
        // TODO This is wrong but I need the uber owl file to debug
        String construct = String.format("%s construct where { <%s> rdfs:range ?o }", buildPrefixes(), propUri);
        Query query = QueryFactory.create(construct);
        try(QueryExecution qe = QueryExecutionFactory.create(query, model))
        {
            Model results = qe.execConstruct();
            for (Statement s : results.listStatements().toSet())
            {
                if(s.getObject().isAnon())
                {
                    // anonymous range: object properties use owl:unionOf
                    // lists, datatype properties use owl:oneOf enumerations
                    String listPredicate = isObjectProperty ? "owl:unionOf" : "owl:oneOf";
                    String select = String.format("%%s select ?member where { <%%s> rdfs:range ?o . ?o %s ?list . ?list rdf:rest*/rdf:first ?member }".replaceFirst("%s", listPredicate), buildPrefixes(), propUri);
                    Query query2 = QueryFactory.create(select);
                    try (QueryExecution qe2 = QueryExecutionFactory.create(query2, model))
                    {
                        ResultSet results2 = qe2.execSelect();
                        while (results2.hasNext())
                        {
                            QuerySolution sol = results2.nextSolution();
                            range.add(getResource(sol.get("member")));
                        }
                    }
                }
                else
                {
                    range.add(s.getObject().asResource().toString());
                }
            }
        }
        return range;
    }

    /**
     *
     * @param classUri
     * @param propUri
     * @return [0] = min, [1] = max, [2] = exact, null if not specified
     */
    public Integer[] getCardinality(String classUri, String propUri)
    {
        Integer[] cardinality = new Integer[3];
        Map<String, Map<String, RDFNode>> tmp = getRestrictions(classUri);
        for(Map.Entry<String, Map<String, RDFNode>> entry : tmp.entrySet())
        {
            Map<String, RDFNode> t = entry.getValue();
            if(t.containsKey("http://www.w3.org/2002/07/owl#onProperty") &&
                    t.get("http://www.w3.org/2002/07/owl#onProperty").toString().equals(propUri))
            {
                if(t.containsKey("http://www.w3.org/2002/07/owl#minCardinality"))
                {
                    cardinality[0] = t.get("http://www.w3.org/2002/07/owl#minCardinality").asLiteral().getInt();
                }
                if(t.containsKey("http://www.w3.org/2002/07/owl#maxCardinality"))
                {
                    cardinality[1] = t.get("http://www.w3.org/2002/07/owl#maxCardinality").asLiteral().getInt();
                }
                if(t.containsKey("http://www.w3.org/2002/07/owl#cardinality"))
                {
                    cardinality[2] = t.get("http://www.w3.org/2002/07/owl#cardinality").asLiteral().getInt();
                }
            }
        }
        return cardinality;
    }

    /**
     * Collects the owl:Restriction nodes attached to a class via
     * rdfs:subClassOf, keyed by restriction node, then by predicate URI,
     * with the object node as value. Shared by getRangeOfProperty and
     * getCardinality, which previously duplicated this query inline.
     */
    private Map<String, Map<String, RDFNode>> getRestrictions(String classUri)
    {
        String select = String.format("%s select ?orsc ?orp ?orv where { <%s> rdfs:subClassOf ?orsc . ?orsc rdf:type owl:Restriction . ?orsc ?orp ?orv }",
                buildPrefixes(), classUri);
        // orsc orp orv
        Map<String, Map<String, RDFNode>> tmp = new HashMap<>();
        Query query = QueryFactory.create(select);
        try(QueryExecution qe = QueryExecutionFactory.create(query, model))
        {
            ResultSet results = qe.execSelect();
            while(results.hasNext())
            {
                QuerySolution sol = results.nextSolution();
                String orsc = sol.getResource("orsc").toString();
                String orp = sol.getResource("orp").toString();
                RDFNode orv = sol.get("orv");
                tmp.computeIfAbsent(orsc, k -> new HashMap<>()).put(orp, orv);
            }
        }
        return tmp;
    }

    /**
     * Returns every (predicate -> literal values) pair attached to a subject.
     */
    public Map<String, Set<Literal>> getAnnotationProperties(String uri)
    {
        Map<String, Set<Literal>> properties = new HashMap<>();
        String sparql = String.format("%s construct where { <%s> ?p ?o }", buildPrefixes(), uri);
        Query query = QueryFactory.create(sparql);
        try(QueryExecution qe = QueryExecutionFactory.create(query, model))
        {
            Model results = qe.execConstruct();
            for (Statement s : results.listStatements().toSet())
            {
                String predicate = s.getPredicate().toString();
                properties.computeIfAbsent(predicate, k -> new HashSet<>())
                        .add(getLiteral(s.getObject()));
            }
        }
        return properties;
    }

//    private void validateLiteral(String value)
//    {
//        String regex = "\\\".+?\\\"(@[a-z\\-]+$|\\^\\^.+)?";
//
//        if(!value.matches(regex))
//        {
//            throw new IllegalArgumentException("Literal does not conform to spec");
//        }
//    }

    /** Renders the prefix map as SPARQL "prefix name: <uri>" lines. */
    private String buildPrefixes()
    {
        StringBuilder builder = new StringBuilder();
        for(Map.Entry<String, String> entry : prefixes.entrySet())
        {
            builder.append(String.format("prefix %s: %s \n", entry.getKey(), entry.getValue()));
        }
        return builder.toString();
    }

    /**
     * @return the node's URI when it is a resource, otherwise "" (with a
     *         warning logged for literal nodes)
     */
    public String getResource(RDFNode node)
    {
        String object = "";
        if (node.isResource())
        {
            object = node.asResource().getURI();
        } else
        {
            log.warn("Literal node processed as resource");
        }
        return object;
    }

    /**
     * Converts any RDF node to a {@link Literal}: literals keep their value
     * plus datatype URI or language tag, resources contribute their URI, and
     * anonymous nodes fall back to toString().
     */
    public Literal getLiteral(RDFNode node)
    {
        Literal lit = new Literal();
        if(node.isLiteral())
        {
            lit.value = node.asLiteral().getString();
            if(node.asLiteral().getDatatypeURI() != null && !node.asLiteral().getDatatypeURI().equals(""))
            {
                lit.type = node.asLiteral().getDatatypeURI();
            }
            else if(node.asLiteral().getLanguage() != null && !node.asLiteral().getLanguage().equals(""))
            {
                lit.lang = node.asLiteral().getLanguage();
            }
        }
        else if(node.isResource())
        {
            lit.value = node.asResource().getURI();
        }
        else
        {
            // Anonymous nodes
            lit.value = node.toString();
        }
        return lit;
    }

    public Map<String, String> getPrefixes()
    {
        return prefixes;
    }
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.index.engine;
import org.apache.logging.log4j.Logger;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.FilterCodecReader;
import org.apache.lucene.index.FilterDirectoryReader;
import org.apache.lucene.index.FilterLeafReader;
import org.apache.lucene.index.IndexCommit;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LazySoftDeletesDirectoryReaderWrapper;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.LiveIndexWriterConfig;
import org.apache.lucene.index.MergePolicy;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.SegmentReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ReferenceManager;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TotalHitCountCollector;
import org.apache.lucene.search.Weight;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.FixedBitSet;
import org.elasticsearch.Version;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.support.replication.ReplicationResponse;
import org.elasticsearch.cluster.ClusterModule;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.routing.AllocationId;
import org.elasticsearch.common.CheckedBiFunction;
import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Randomness;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.breaker.CircuitBreaker;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.uid.Versions;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexModule;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.MapperTestUtils;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.codec.CodecService;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.IdFieldMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.Mapping;
import org.elasticsearch.index.mapper.MappingLookup;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.SeqNoFieldMapper;
import org.elasticsearch.index.mapper.SourceFieldMapper;
import org.elasticsearch.index.mapper.SourceToParse;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.VersionFieldMapper;
import org.elasticsearch.index.seqno.LocalCheckpointTracker;
import org.elasticsearch.index.seqno.ReplicationTracker;
import org.elasticsearch.index.seqno.RetentionLeases;
import org.elasticsearch.index.seqno.SequenceNumbers;
import org.elasticsearch.index.shard.SearcherHelper;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.store.Store;
import org.elasticsearch.index.translog.Translog;
import org.elasticsearch.index.translog.TranslogConfig;
import org.elasticsearch.index.translog.TranslogDeletionPolicy;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.test.DummyShardLock;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.IndexSettingsModule;
import org.elasticsearch.threadpool.TestThreadPool;
import org.elasticsearch.threadpool.ThreadPool;
import org.junit.After;
import org.junit.Before;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.charset.Charset;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.function.LongSupplier;
import java.util.function.Supplier;
import java.util.function.ToLongBiFunction;
import java.util.stream.Collectors;
import static java.util.Collections.emptyList;
import static java.util.Collections.shuffle;
import static org.elasticsearch.index.engine.Engine.Operation.Origin.PEER_RECOVERY;
import static org.elasticsearch.index.engine.Engine.Operation.Origin.PRIMARY;
import static org.elasticsearch.index.engine.Engine.Operation.Origin.REPLICA;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
import static org.hamcrest.Matchers.notNullValue;
public abstract class EngineTestCase extends ESTestCase {
// Fixed shard identity shared by every engine/store created in these tests.
protected final ShardId shardId = new ShardId(new Index("index", "_na_"), 0);
protected final AllocationId allocationId = AllocationId.newInitializing();
// Baseline index settings used when a test does not supply its own.
protected static final IndexSettings INDEX_SETTINGS = IndexSettingsModule.newIndexSettings("index", Settings.EMPTY);
protected ThreadPool threadPool;
// Replays translog operations during engine recovery.
protected TranslogHandler translogHandler;
// Primary-side store/engine and a replica-side pair, built in setUp().
protected Store store;
protected Store storeReplica;
protected InternalEngine engine;
protected InternalEngine replicaEngine;
protected IndexSettings defaultSettings;
// Codec chosen in setUp(): Lucene's default if available, else "default".
protected String codecName;
protected Path primaryTranslogDir;
protected Path replicaTranslogDir;
// A default primary term is used by engine instances created in this test.
protected final PrimaryTermSupplier primaryTerm = new PrimaryTermSupplier(1L);
/** Asserts that, after a refresh, exactly {@code numDocs} documents are searchable. */
protected static void assertVisibleCount(Engine engine, int numDocs) throws IOException {
    assertVisibleCount(engine, numDocs, true);
}
/**
 * Asserts that exactly {@code numDocs} documents are visible to a searcher,
 * optionally refreshing first so recent writes become searchable.
 */
protected static void assertVisibleCount(Engine engine, int numDocs, boolean refresh) throws IOException {
    if (refresh) {
        engine.refresh("test");
    }
    try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
        final TotalHitCountCollector collector = new TotalHitCountCollector();
        searcher.search(new MatchAllDocsQuery(), collector);
        assertThat(collector.getTotalHits(), equalTo(numDocs));
    }
}
/**
 * Builds the index settings used for the default engines, with a few values
 * randomized per run. Subclasses may override to customize.
 */
protected Settings indexSettings() {
    // TODO randomize more settings
    return Settings.builder()
        .put(IndexSettings.INDEX_GC_DELETES_SETTING.getKey(), "1h") // make sure this doesn't kick in on us
        .put(EngineConfig.INDEX_CODEC_SETTING.getKey(), codecName)
        .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
        .put(IndexSettings.MAX_REFRESH_LISTENERS_PER_SHARD.getKey(),
            between(10, 10 * IndexSettings.MAX_REFRESH_LISTENERS_PER_SHARD.get(Settings.EMPTY)))
        .put(IndexSettings.INDEX_SOFT_DELETES_RETENTION_OPERATIONS_SETTING.getKey(), between(0, 1000))
        .build();
}
@Override
@Before
public void setUp() throws Exception {
    super.setUp();
    // Randomize the primary term so tests don't accidentally depend on term 1.
    primaryTerm.set(randomLongBetween(1, Long.MAX_VALUE));
    CodecService codecService = new CodecService(null, logger);
    String name = Codec.getDefault().getName();
    if (Arrays.asList(codecService.availableCodecs()).contains(name)) {
        // some codecs are read only so we only take the ones that we have in the service and randomly
        // selected by lucene test case.
        codecName = name;
    } else {
        codecName = "default";
    }
    defaultSettings = IndexSettingsModule.newIndexSettings("test", indexSettings());
    threadPool = new TestThreadPool(getClass().getName());
    store = createStore();
    storeReplica = createStore();
    // Start from clean directories so leftover segments can't leak between tests.
    Lucene.cleanLuceneIndex(store.directory());
    Lucene.cleanLuceneIndex(storeReplica.directory());
    primaryTranslogDir = createTempDir("translog-primary");
    translogHandler = createTranslogHandler(defaultSettings);
    engine = createEngine(store, primaryTranslogDir);
    LiveIndexWriterConfig currentIndexWriterConfig = engine.getCurrentIndexWriterConfig();
    // Sanity-check that the codec we picked actually reached the writer config.
    assertEquals(engine.config().getCodec().getName(), codecService.codec(codecName).getName());
    assertEquals(currentIndexWriterConfig.getCodec().getName(), codecService.codec(codecName).getName());
    if (randomBoolean()) {
        engine.config().setEnableGcDeletes(false);
    }
    replicaTranslogDir = createTempDir("translog-replica");
    replicaEngine = createEngine(storeReplica, replicaTranslogDir);
    currentIndexWriterConfig = replicaEngine.getCurrentIndexWriterConfig();
    assertEquals(replicaEngine.config().getCodec().getName(), codecService.codec(codecName).getName());
    assertEquals(currentIndexWriterConfig.getCodec().getName(), codecService.codec(codecName).getName());
    if (randomBoolean()) {
        // FIX: previously toggled engine.config() a second time (copy-paste),
        // leaving the replica's gc-deletes flag never randomized.
        replicaEngine.config().setEnableGcDeletes(false);
    }
}
/**
 * Copies {@code config}, replacing only the global checkpoint supplier.
 * NOTE(review): all three copy() variants pass Collections.emptyList() for the
 * internal refresh listeners rather than config.getInternalRefreshListener() —
 * confirm dropping them is intentional.
 */
public EngineConfig copy(EngineConfig config, LongSupplier globalCheckpointSupplier) {
    return new EngineConfig(config.getShardId(), config.getThreadPool(), config.getIndexSettings(),
        config.getWarmer(), config.getStore(), config.getMergePolicy(), config.getAnalyzer(), config.getSimilarity(),
        new CodecService(null, logger), config.getEventListener(), config.getQueryCache(), config.getQueryCachingPolicy(),
        config.getTranslogConfig(), config.getFlushMergesAfter(),
        config.getExternalRefreshListener(), Collections.emptyList(), config.getIndexSort(),
        config.getCircuitBreakerService(), globalCheckpointSupplier, config.retentionLeasesSupplier(),
        config.getPrimaryTermSupplier(), config.getSnapshotCommitSupplier());
}
/** Copies {@code config}, replacing only the analyzer. */
public EngineConfig copy(EngineConfig config, Analyzer analyzer) {
    return new EngineConfig(config.getShardId(), config.getThreadPool(), config.getIndexSettings(),
        config.getWarmer(), config.getStore(), config.getMergePolicy(), analyzer, config.getSimilarity(),
        new CodecService(null, logger), config.getEventListener(), config.getQueryCache(), config.getQueryCachingPolicy(),
        config.getTranslogConfig(), config.getFlushMergesAfter(),
        config.getExternalRefreshListener(), Collections.emptyList(), config.getIndexSort(),
        config.getCircuitBreakerService(), config.getGlobalCheckpointSupplier(), config.retentionLeasesSupplier(),
        config.getPrimaryTermSupplier(), config.getSnapshotCommitSupplier());
}
/** Copies {@code config}, replacing only the merge policy. */
public EngineConfig copy(EngineConfig config, MergePolicy mergePolicy) {
    return new EngineConfig(config.getShardId(), config.getThreadPool(), config.getIndexSettings(),
        config.getWarmer(), config.getStore(), mergePolicy, config.getAnalyzer(), config.getSimilarity(),
        new CodecService(null, logger), config.getEventListener(), config.getQueryCache(), config.getQueryCachingPolicy(),
        config.getTranslogConfig(), config.getFlushMergesAfter(),
        config.getExternalRefreshListener(), Collections.emptyList(), config.getIndexSort(),
        config.getCircuitBreakerService(), config.getGlobalCheckpointSupplier(), config.retentionLeasesSupplier(),
        config.getPrimaryTermSupplier(), config.getSnapshotCommitSupplier());
}
@Override
@After
public void tearDown() throws Exception {
    super.tearDown();
    try {
        // Run engine-consistency assertions only for engines that are still open.
        if (engine != null && engine.isClosed.get() == false) {
            engine.getTranslog().getDeletionPolicy().assertNoOpenTranslogRefs();
            assertNoInFlightDocuments(engine);
            assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine);
            assertMaxSeqNoInCommitUserData(engine);
            assertAtMostOneLuceneDocumentPerSequenceNumber(engine);
        }
        if (replicaEngine != null && replicaEngine.isClosed.get() == false) {
            replicaEngine.getTranslog().getDeletionPolicy().assertNoOpenTranslogRefs();
            assertNoInFlightDocuments(replicaEngine);
            assertConsistentHistoryBetweenTranslogAndLuceneIndex(replicaEngine);
            assertMaxSeqNoInCommitUserData(replicaEngine);
            assertAtMostOneLuceneDocumentPerSequenceNumber(replicaEngine);
        }
        // FIX: guard against null engines (e.g. when setUp() failed part-way);
        // previously these lines threw NPE and masked the original failure.
        if (engine != null) {
            assertThat(engine.config().getCircuitBreakerService().getBreaker(CircuitBreaker.ACCOUNTING).getUsed(), equalTo(0L));
        }
        if (replicaEngine != null) {
            assertThat(replicaEngine.config().getCircuitBreakerService().getBreaker(CircuitBreaker.ACCOUNTING).getUsed(), equalTo(0L));
        }
    } finally {
        // Always release engines, stores and the thread pool, even if assertions fail.
        IOUtils.close(replicaEngine, storeReplica, engine, store, () -> terminate(threadPool));
    }
}
/** Creates a test document whose "value" text field holds the literal "test". */
protected static ParseContext.Document testDocumentWithTextField() {
    return testDocumentWithTextField("test");
}
/** Creates a test document with a single stored text field "value" set to {@code value}. */
protected static ParseContext.Document testDocumentWithTextField(String value) {
    ParseContext.Document document = testDocument();
    document.add(new TextField("value", value, Field.Store.YES));
    return document;
}
/** Creates an empty Lucene test document. */
protected static ParseContext.Document testDocument() {
    return new ParseContext.Document();
}
/** Creates a parsed document with id/routing and a fixed JSON source. */
public static ParsedDocument createParsedDoc(String id, String routing) {
    return testParsedDocument(id, routing, testDocumentWithTextField(), new BytesArray("{ \"value\" : \"test\" }"), null);
}
/** Like {@link #createParsedDoc(String, String)}, optionally storing a recovery source instead of _source. */
public static ParsedDocument createParsedDoc(String id, String routing, boolean recoverySource) {
    return testParsedDocument(id, routing, testDocumentWithTextField(), new BytesArray("{ \"value\" : \"test\" }"), null,
        recoverySource);
}
/** Builds a ParsedDocument with the regular _source field (no recovery source). */
protected static ParsedDocument testParsedDocument(
    String id, String routing, ParseContext.Document document, BytesReference source, Mapping mappingUpdate) {
    return testParsedDocument(id, routing, document, source, mappingUpdate, false);
}
/**
 * Builds a ParsedDocument, adding the metadata fields (_id, _version, seq-no,
 * primary term) that the engine expects on every document. When
 * {@code recoverySource} is true the source bytes are stored under the
 * recovery-source field (plus a marker doc-value) instead of _source.
 */
protected static ParsedDocument testParsedDocument(
    String id, String routing, ParseContext.Document document, BytesReference source, Mapping mappingUpdate,
    boolean recoverySource) {
    Field uidField = new Field("_id", Uid.encodeId(id), IdFieldMapper.Defaults.FIELD_TYPE);
    Field versionField = new NumericDocValuesField("_version", 0);
    SeqNoFieldMapper.SequenceIDFields seqID = SeqNoFieldMapper.SequenceIDFields.emptySeqID();
    document.add(uidField);
    document.add(versionField);
    document.add(seqID.seqNo);
    document.add(seqID.seqNoDocValue);
    document.add(seqID.primaryTerm);
    BytesRef ref = source.toBytesRef();
    if (recoverySource) {
        document.add(new StoredField(SourceFieldMapper.RECOVERY_SOURCE_NAME, ref.bytes, ref.offset, ref.length));
        // Marker doc-value flags this doc as carrying a recovery source.
        document.add(new NumericDocValuesField(SourceFieldMapper.RECOVERY_SOURCE_NAME, 1));
    } else {
        document.add(new StoredField(SourceFieldMapper.NAME, ref.bytes, ref.offset, ref.length));
    }
    return new ParsedDocument(versionField, seqID, id, routing, Arrays.asList(document), source, XContentType.JSON,
        mappingUpdate);
}
/**
 * Returns a factory producing parsed documents with a "nested_field" of the
 * requested size: (docId, nestedFieldValues) -> ParsedDocument. A mapper
 * service with a nested mapping is built once and reused by every call.
 */
public static CheckedBiFunction<String, Integer, ParsedDocument, IOException> nestedParsedDocFactory() throws Exception {
    final MapperService mapperService = createMapperService();
    final String nestedMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
        .startObject("properties").startObject("nested_field").field("type", "nested").endObject().endObject()
        .endObject().endObject());
    final DocumentMapper nestedMapper = mapperService.merge("type", new CompressedXContent(nestedMapping),
        MapperService.MergeReason.MAPPING_UPDATE);
    return (docId, nestedFieldValues) -> {
        final XContentBuilder source = XContentFactory.jsonBuilder().startObject().field("field", "value");
        if (nestedFieldValues > 0) {
            // Only emit the nested object when at least one value is requested.
            XContentBuilder nestedField = source.startObject("nested_field");
            for (int i = 0; i < nestedFieldValues; i++) {
                nestedField.field("field-" + i, "value-" + i);
            }
            source.endObject();
        }
        source.endObject();
        return nestedMapper.parse(new SourceToParse("test", docId, BytesReference.bytes(source), XContentType.JSON));
    };
}
/** Creates a store over a fresh random directory. */
protected Store createStore() throws IOException {
    return createStore(newDirectory());
}
/** Creates a store over {@code directory} with the default test index settings. */
protected Store createStore(final Directory directory) throws IOException {
    return createStore(INDEX_SETTINGS, directory);
}
/** Creates a store for this test's shard id; the DummyShardLock skips real locking. */
protected Store createStore(final IndexSettings indexSettings, final Directory directory) throws IOException {
    return new Store(shardId, indexSettings, directory, new DummyShardLock(shardId));
}
/** Creates a translog in the primary translog directory. */
protected Translog createTranslog(LongSupplier primaryTermSupplier) throws IOException {
    return createTranslog(primaryTranslogDir, primaryTermSupplier);
}
/** Creates an empty translog at {@code translogPath}, initialized at NO_OPS_PERFORMED. */
protected Translog createTranslog(Path translogPath, LongSupplier primaryTermSupplier) throws IOException {
    TranslogConfig translogConfig = new TranslogConfig(shardId, translogPath, INDEX_SETTINGS, BigArrays.NON_RECYCLING_INSTANCE);
    String translogUUID = Translog.createEmptyTranslog(translogPath, SequenceNumbers.NO_OPS_PERFORMED, shardId,
        primaryTermSupplier.getAsLong());
    // No-op persisted-seq-no consumer: tests don't track persistence here.
    return new Translog(translogConfig, translogUUID, new TranslogDeletionPolicy(),
        () -> SequenceNumbers.NO_OPS_PERFORMED, primaryTermSupplier, seqNo -> {});
}
/** Creates the handler used to replay translog operations during recovery. */
protected TranslogHandler createTranslogHandler(IndexSettings indexSettings) {
    return new TranslogHandler(xContentRegistry(), indexSettings);
}
/*
 * createEngine(...) overload chain: each variant fills in defaults and
 * delegates down until the fully-parameterized variant builds an EngineConfig
 * and hands off to createEngine(indexWriterFactory, lcts, seqNoForOperation, config).
 */
/** Engine with default settings, a random merge policy, and no custom hooks. */
protected InternalEngine createEngine(Store store, Path translogPath) throws IOException {
    return createEngine(defaultSettings, store, translogPath, newMergePolicy(), null);
}
/** Engine with a custom global checkpoint supplier. */
protected InternalEngine createEngine(Store store, Path translogPath, LongSupplier globalCheckpointSupplier) throws IOException {
    return createEngine(defaultSettings, store, translogPath, newMergePolicy(), null, null, globalCheckpointSupplier);
}
/** Engine with a custom local-checkpoint-tracker factory (maxSeqNo, localCheckpoint) -> tracker. */
protected InternalEngine createEngine(
    Store store,
    Path translogPath,
    BiFunction<Long, Long, LocalCheckpointTracker> localCheckpointTrackerSupplier) throws IOException {
    return createEngine(defaultSettings, store, translogPath, newMergePolicy(), null, localCheckpointTrackerSupplier, null);
}
/** Engine with custom checkpoint tracking and per-operation seq-no generation. */
protected InternalEngine createEngine(
    Store store,
    Path translogPath,
    BiFunction<Long, Long, LocalCheckpointTracker> localCheckpointTrackerSupplier,
    ToLongBiFunction<Engine, Engine.Operation> seqNoForOperation) throws IOException {
    return createEngine(
        defaultSettings, store, translogPath, newMergePolicy(), null, localCheckpointTrackerSupplier, null, seqNoForOperation);
}
/** Engine with explicit index settings and merge policy. */
protected InternalEngine createEngine(
    IndexSettings indexSettings, Store store, Path translogPath, MergePolicy mergePolicy) throws IOException {
    return createEngine(indexSettings, store, translogPath, mergePolicy, null);
}
/** Adds an optional IndexWriter factory for intercepting writer creation. */
protected InternalEngine createEngine(IndexSettings indexSettings, Store store, Path translogPath, MergePolicy mergePolicy,
                                      @Nullable IndexWriterFactory indexWriterFactory) throws IOException {
    return createEngine(indexSettings, store, translogPath, mergePolicy, indexWriterFactory, null, null);
}
protected InternalEngine createEngine(
    IndexSettings indexSettings,
    Store store,
    Path translogPath,
    MergePolicy mergePolicy,
    @Nullable IndexWriterFactory indexWriterFactory,
    @Nullable BiFunction<Long, Long, LocalCheckpointTracker> localCheckpointTrackerSupplier,
    @Nullable LongSupplier globalCheckpointSupplier) throws IOException {
    return createEngine(
        indexSettings, store, translogPath, mergePolicy, indexWriterFactory, localCheckpointTrackerSupplier, null, null,
        globalCheckpointSupplier);
}
protected InternalEngine createEngine(
    IndexSettings indexSettings,
    Store store,
    Path translogPath,
    MergePolicy mergePolicy,
    @Nullable IndexWriterFactory indexWriterFactory,
    @Nullable BiFunction<Long, Long, LocalCheckpointTracker> localCheckpointTrackerSupplier,
    @Nullable LongSupplier globalCheckpointSupplier,
    @Nullable ToLongBiFunction<Engine, Engine.Operation> seqNoForOperation) throws IOException {
    return createEngine(
        indexSettings,
        store,
        translogPath,
        mergePolicy,
        indexWriterFactory,
        localCheckpointTrackerSupplier,
        seqNoForOperation,
        null,
        globalCheckpointSupplier);
}
/** Fully-parameterized variant: builds the EngineConfig and creates the engine. */
protected InternalEngine createEngine(
    IndexSettings indexSettings,
    Store store,
    Path translogPath,
    MergePolicy mergePolicy,
    @Nullable IndexWriterFactory indexWriterFactory,
    @Nullable BiFunction<Long, Long, LocalCheckpointTracker> localCheckpointTrackerSupplier,
    @Nullable ToLongBiFunction<Engine, Engine.Operation> seqNoForOperation,
    @Nullable Sort indexSort,
    @Nullable LongSupplier globalCheckpointSupplier) throws IOException {
    EngineConfig config = config(indexSettings, store, translogPath, mergePolicy, null, indexSort, globalCheckpointSupplier);
    return createEngine(indexWriterFactory, localCheckpointTrackerSupplier, seqNoForOperation, config);
}
/** Engine from a prebuilt config, no custom hooks. */
protected InternalEngine createEngine(EngineConfig config) throws IOException {
    return createEngine(null, null, null, config);
}
/**
 * Core engine factory: bootstraps an empty index and translog when the
 * directory has no index yet, builds the engine via createInternalEngine,
 * and replays the translog so the engine is fully recovered before use.
 */
protected InternalEngine createEngine(@Nullable IndexWriterFactory indexWriterFactory,
                                      @Nullable BiFunction<Long, Long, LocalCheckpointTracker> localCheckpointTrackerSupplier,
                                      @Nullable ToLongBiFunction<Engine, Engine.Operation> seqNoForOperation,
                                      EngineConfig config) throws IOException {
    final Store store = config.getStore();
    final Directory directory = store.directory();
    if (Lucene.indexExists(directory) == false) {
        // Fresh directory: create an empty index plus an empty translog and link them.
        store.createEmpty();
        final String translogUuid = Translog.createEmptyTranslog(config.getTranslogConfig().getTranslogPath(),
            SequenceNumbers.NO_OPS_PERFORMED, shardId, primaryTerm.get());
        store.associateIndexWithNewTranslog(translogUuid);
    }
    InternalEngine internalEngine = createInternalEngine(indexWriterFactory, localCheckpointTrackerSupplier, seqNoForOperation, config);
    // Recover everything available in the translog (up to Long.MAX_VALUE).
    internalEngine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
    return internalEngine;
}
/** Creates an InternalEngine with an explicit per-index document limit. */
public static InternalEngine createEngine(EngineConfig engineConfig, int maxDocs) {
    return new InternalEngine(engineConfig, maxDocs, LocalCheckpointTracker::new);
}
/** Hook for tests to intercept or replace IndexWriter construction. */
@FunctionalInterface
public interface IndexWriterFactory {
    IndexWriter createWriter(Directory directory, IndexWriterConfig iwc) throws IOException;
}
/**
 * Generate a new sequence number and return it. Only works on InternalEngines
 * (asserts the cast; other Engine implementations are not supported here).
 */
public static long generateNewSeqNo(final Engine engine) {
    assert engine instanceof InternalEngine : "expected InternalEngine, got: " + engine.getClass();
    InternalEngine internalEngine = (InternalEngine) engine;
    return internalEngine.getLocalCheckpointTracker().generateSeqNo();
}
/**
 * Builds an InternalTestEngine, wiring in optional hooks: a custom IndexWriter
 * factory, a custom local-checkpoint-tracker factory, and a custom seq-no
 * generator for operations. Two branches are needed because the tracker
 * supplier is passed to a different constructor; the anonymous overrides in
 * both branches are otherwise identical.
 */
public static InternalEngine createInternalEngine(
    @Nullable final IndexWriterFactory indexWriterFactory,
    @Nullable final BiFunction<Long, Long, LocalCheckpointTracker> localCheckpointTrackerSupplier,
    @Nullable final ToLongBiFunction<Engine, Engine.Operation> seqNoForOperation,
    final EngineConfig config) {
    if (localCheckpointTrackerSupplier == null) {
        return new InternalTestEngine(config) {
            @Override
            IndexWriter createWriter(Directory directory, IndexWriterConfig iwc) throws IOException {
                // Delegate writer creation to the factory when one is provided.
                return (indexWriterFactory != null) ?
                    indexWriterFactory.createWriter(directory, iwc) :
                    super.createWriter(directory, iwc);
            }
            @Override
            protected long doGenerateSeqNoForOperation(final Operation operation) {
                // Delegate seq-no generation to the hook when one is provided.
                return seqNoForOperation != null
                    ? seqNoForOperation.applyAsLong(this, operation)
                    : super.doGenerateSeqNoForOperation(operation);
            }
        };
    } else {
        return new InternalTestEngine(config, IndexWriter.MAX_DOCS, localCheckpointTrackerSupplier) {
            @Override
            IndexWriter createWriter(Directory directory, IndexWriterConfig iwc) throws IOException {
                return (indexWriterFactory != null) ?
                    indexWriterFactory.createWriter(directory, iwc) :
                    super.createWriter(directory, iwc);
            }
            @Override
            protected long doGenerateSeqNoForOperation(final Operation operation) {
                return seqNoForOperation != null
                    ? seqNoForOperation.applyAsLong(this, operation)
                    : super.doGenerateSeqNoForOperation(operation);
            }
        };
    }
}
/*
 * config(...) overload chain: progressively fills in defaults (retention-lease
 * supplier, breaker service) before reaching the fully-parameterized variant.
 */
/** Config with a fixed NO_OPS_PERFORMED global checkpoint. */
public EngineConfig config(IndexSettings indexSettings, Store store, Path translogPath, MergePolicy mergePolicy,
                           ReferenceManager.RefreshListener refreshListener) {
    return config(indexSettings, store, translogPath, mergePolicy, refreshListener, null, () -> SequenceNumbers.NO_OPS_PERFORMED);
}
/** Adds index sort and a custom global checkpoint supplier; derives a retention-lease supplier. */
public EngineConfig config(IndexSettings indexSettings, Store store, Path translogPath, MergePolicy mergePolicy,
                           ReferenceManager.RefreshListener refreshListener, Sort indexSort, LongSupplier globalCheckpointSupplier) {
    return config(
        indexSettings,
        store,
        translogPath,
        mergePolicy,
        refreshListener,
        indexSort,
        globalCheckpointSupplier,
        // Both suppliers must be null together or non-null together (asserted downstream).
        globalCheckpointSupplier == null ? null : () -> RetentionLeases.EMPTY);
}
/** Adds an explicit retention-lease supplier; uses a no-op breaker service. */
public EngineConfig config(
    final IndexSettings indexSettings,
    final Store store,
    final Path translogPath,
    final MergePolicy mergePolicy,
    final ReferenceManager.RefreshListener refreshListener,
    final Sort indexSort,
    final LongSupplier globalCheckpointSupplier,
    final Supplier<RetentionLeases> retentionLeasesSupplier) {
    return config(
        indexSettings,
        store,
        translogPath,
        mergePolicy,
        refreshListener,
        null,
        indexSort,
        globalCheckpointSupplier,
        retentionLeasesSupplier,
        new NoneCircuitBreakerService());
}
/** Adds separate external/internal refresh listeners and a breaker service. */
public EngineConfig config(IndexSettings indexSettings, Store store, Path translogPath, MergePolicy mergePolicy,
                           ReferenceManager.RefreshListener externalRefreshListener,
                           ReferenceManager.RefreshListener internalRefreshListener,
                           Sort indexSort, @Nullable LongSupplier maybeGlobalCheckpointSupplier,
                           CircuitBreakerService breakerService) {
    return config(
        indexSettings,
        store,
        translogPath,
        mergePolicy,
        externalRefreshListener,
        internalRefreshListener,
        indexSort,
        maybeGlobalCheckpointSupplier,
        maybeGlobalCheckpointSupplier == null ? null : () -> RetentionLeases.EMPTY,
        breakerService);
}
/**
 * Fully-parameterized EngineConfig builder. When no global-checkpoint supplier
 * is given, a ReplicationTracker is created to supply both the global
 * checkpoint and the retention leases; otherwise the provided suppliers are
 * used as-is (both must be given together, both or neither — asserted below).
 */
public EngineConfig config(
    final IndexSettings indexSettings,
    final Store store,
    final Path translogPath,
    final MergePolicy mergePolicy,
    final ReferenceManager.RefreshListener externalRefreshListener,
    final ReferenceManager.RefreshListener internalRefreshListener,
    final Sort indexSort,
    final @Nullable LongSupplier maybeGlobalCheckpointSupplier,
    final @Nullable Supplier<RetentionLeases> maybeRetentionLeasesSupplier,
    final CircuitBreakerService breakerService) {
    final IndexWriterConfig iwc = newIndexWriterConfig();
    final TranslogConfig translogConfig = new TranslogConfig(shardId, translogPath, indexSettings, BigArrays.NON_RECYCLING_INSTANCE);
    final Engine.EventListener eventListener = new Engine.EventListener() {}; // we don't need to notify anybody in this test
    final List<ReferenceManager.RefreshListener> extRefreshListenerList =
        externalRefreshListener == null ? emptyList() : Collections.singletonList(externalRefreshListener);
    final List<ReferenceManager.RefreshListener> intRefreshListenerList =
        internalRefreshListener == null ? emptyList() : Collections.singletonList(internalRefreshListener);
    final LongSupplier globalCheckpointSupplier;
    final Supplier<RetentionLeases> retentionLeasesSupplier;
    if (maybeGlobalCheckpointSupplier == null) {
        assert maybeRetentionLeasesSupplier == null;
        // No supplier given: back both values with a real ReplicationTracker.
        final ReplicationTracker replicationTracker = new ReplicationTracker(
            shardId,
            allocationId.getId(),
            indexSettings,
            randomNonNegativeLong(),
            SequenceNumbers.NO_OPS_PERFORMED,
            update -> {},
            () -> 0L,
            (leases, listener) -> listener.onResponse(new ReplicationResponse()),
            () -> SafeCommitInfo.EMPTY);
        globalCheckpointSupplier = replicationTracker;
        retentionLeasesSupplier = replicationTracker::getRetentionLeases;
    } else {
        assert maybeRetentionLeasesSupplier != null;
        globalCheckpointSupplier = maybeGlobalCheckpointSupplier;
        retentionLeasesSupplier = maybeRetentionLeasesSupplier;
    }
    return new EngineConfig(
        shardId,
        threadPool,
        indexSettings,
        null,
        store,
        mergePolicy,
        iwc.getAnalyzer(),
        iwc.getSimilarity(),
        new CodecService(null, logger),
        eventListener,
        IndexSearcher.getDefaultQueryCache(),
        IndexSearcher.getDefaultQueryCachingPolicy(),
        translogConfig,
        TimeValue.timeValueMinutes(5),
        extRefreshListenerList,
        intRefreshListenerList,
        indexSort,
        breakerService,
        globalCheckpointSupplier,
        retentionLeasesSupplier,
        primaryTerm,
        IndexModule.DEFAULT_SNAPSHOT_COMMIT_SUPPLIER);
}
/**
 * Copies {@code config} onto a different store/translog path, forcing
 * soft-deletes on in the derived index settings.
 */
protected EngineConfig config(EngineConfig config, Store store, Path translogPath) {
    IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("test",
        Settings.builder().put(config.getIndexSettings().getSettings())
            .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true).build());
    TranslogConfig translogConfig = new TranslogConfig(shardId, translogPath, indexSettings, BigArrays.NON_RECYCLING_INSTANCE);
    return new EngineConfig(config.getShardId(), config.getThreadPool(),
        indexSettings, config.getWarmer(), store, config.getMergePolicy(), config.getAnalyzer(), config.getSimilarity(),
        new CodecService(null, logger), config.getEventListener(), config.getQueryCache(), config.getQueryCachingPolicy(),
        translogConfig, config.getFlushMergesAfter(), config.getExternalRefreshListener(),
        config.getInternalRefreshListener(), config.getIndexSort(), config.getCircuitBreakerService(),
        config.getGlobalCheckpointSupplier(), config.retentionLeasesSupplier(),
        config.getPrimaryTermSupplier(), config.getSnapshotCommitSupplier());
}
/** Config suitable for a NoOpEngine, without a global checkpoint supplier. */
protected EngineConfig noOpConfig(IndexSettings indexSettings, Store store, Path translogPath) {
    return noOpConfig(indexSettings, store, translogPath, null);
}
/** Config suitable for a NoOpEngine, with an optional global checkpoint supplier. */
protected EngineConfig noOpConfig(IndexSettings indexSettings, Store store, Path translogPath, LongSupplier globalCheckpointSupplier) {
    return config(indexSettings, store, translogPath, newMergePolicy(), null, null, globalCheckpointSupplier);
}
// Small fixed byte payloads reused as document sources across tests.
protected static final BytesReference B_1 = new BytesArray(new byte[]{1});
protected static final BytesReference B_2 = new BytesArray(new byte[]{2});
protected static final BytesReference B_3 = new BytesArray(new byte[]{3});
// Minimal valid JSON source ("{}").
protected static final BytesArray SOURCE = bytesArray("{}");
/**
 * Wraps {@code string} as a {@link BytesArray}.
 * FIX: encode with UTF-8 instead of {@code Charset.defaultCharset()} so the
 * produced bytes are identical on every JVM/platform (the previous form made
 * test sources depend on the platform default charset).
 */
protected static BytesArray bytesArray(String string) {
    return new BytesArray(string.getBytes(java.nio.charset.StandardCharsets.UTF_8));
}
/** Builds the _id uid term for a document id. */
public static Term newUid(String id) {
    return new Term("_id", Uid.encodeId(id));
}
/** Builds the _id uid term for a parsed document. */
public static Term newUid(ParsedDocument doc) {
    return newUid(doc.id());
}
/** Builds a Get for the document; realtime also controls the read-from-translog flag. */
protected Engine.Get newGet(boolean realtime, ParsedDocument doc) {
    return new Engine.Get(realtime, realtime, doc.id());
}
/** Builds a primary-origin index operation for the document at the current primary term. */
protected Engine.Index indexForDoc(ParsedDocument doc) {
    return new Engine.Index(newUid(doc), primaryTerm.get(), doc);
}
/** Builds a replica-origin index operation with explicit version and seq-no. */
protected Engine.Index replicaIndexForDoc(ParsedDocument doc, long version, long seqNo,
                                          boolean isRetry) {
    return new Engine.Index(newUid(doc), doc, seqNo, primaryTerm.get(), version, null, Engine.Operation.Origin.REPLICA,
        System.nanoTime(), IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, isRetry, SequenceNumbers.UNASSIGNED_SEQ_NO, 0);
}
/**
 * Builds a replica-origin delete operation.
 * NOTE(review): primary term is hard-coded to 1 here, unlike the index helper
 * above which uses primaryTerm.get() — confirm this asymmetry is intended.
 */
protected Engine.Delete replicaDeleteForDoc(String id, long version, long seqNo, long startTime) {
    return new Engine.Delete(id, newUid(id), seqNo, 1, version, null, Engine.Operation.Origin.REPLICA, startTime,
        SequenceNumbers.UNASSIGNED_SEQ_NO, 0);
}
/** InternalEngine variant of assertVisibleCount; refreshes first. */
protected static void assertVisibleCount(InternalEngine engine, int numDocs) throws IOException {
    assertVisibleCount(engine, numDocs, true);
}
/**
 * InternalEngine variant of assertVisibleCount; body mirrors the Engine
 * overload above, kept separate because of the parameter type.
 */
protected static void assertVisibleCount(InternalEngine engine, int numDocs, boolean refresh) throws IOException {
    if (refresh) {
        engine.refresh("test");
    }
    try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
        final TotalHitCountCollector collector = new TotalHitCountCollector();
        searcher.search(new MatchAllDocsQuery(), collector);
        assertThat(collector.getTotalHits(), equalTo(numDocs));
    }
}
/**
 * Generates a randomized history of index/delete operations for a single
 * document id. For replica histories, seq-nos are assigned as {@code i * 2}
 * (leaving gaps) and the primary term may be bumped once seq-nos appear;
 * primary histories leave seq-nos unassigned and carry the version type.
 *
 * @param forReplica  whether operations originate on a replica
 * @param versionType how per-op versions are derived (INTERNAL/EXTERNAL/EXTERNAL_GTE)
 * @param primaryTerm base primary term for the operations
 * @param minOpCount  minimum number of operations (inclusive)
 * @param maxOpCount  maximum number of operations (inclusive)
 * @param docId       the single document id all operations target
 */
public static List<Engine.Operation> generateSingleDocHistory(boolean forReplica, VersionType versionType,
                                                              long primaryTerm, int minOpCount, int maxOpCount, String docId) {
    final int numOfOps = randomIntBetween(minOpCount, maxOpCount);
    final List<Engine.Operation> ops = new ArrayList<>();
    final Term id = newUid(docId);
    final int startWithSeqNo = 0;
    final String valuePrefix = (forReplica ? "r_" : "p_" ) + docId + "_";
    final boolean incrementTermWhenIntroducingSeqNo = randomBoolean();
    for (int i = 0; i < numOfOps; i++) {
        final Engine.Operation op;
        final long version;
        switch (versionType) {
            case INTERNAL:
                // Replicas replay concrete versions; primaries let the engine pick.
                version = forReplica ? i : Versions.MATCH_ANY;
                break;
            case EXTERNAL:
                version = i;
                break;
            case EXTERNAL_GTE:
                // Sometimes repeat the previous version to exercise the >= semantics.
                version = randomBoolean() ? Math.max(i - 1, 0) : i;
                break;
            default:
                throw new UnsupportedOperationException("unknown version type: " + versionType);
        }
        if (randomBoolean()) {
            op = new Engine.Index(id, testParsedDocument(docId, null, testDocumentWithTextField(valuePrefix + i), SOURCE, null),
                forReplica && i >= startWithSeqNo ? i * 2 : SequenceNumbers.UNASSIGNED_SEQ_NO,
                forReplica && i >= startWithSeqNo && incrementTermWhenIntroducingSeqNo ? primaryTerm + 1 : primaryTerm,
                version,
                forReplica ? null : versionType,
                forReplica ? REPLICA : PRIMARY,
                System.currentTimeMillis(), -1, false,
                SequenceNumbers.UNASSIGNED_SEQ_NO, 0);
        } else {
            op = new Engine.Delete(docId, id,
                forReplica && i >= startWithSeqNo ? i * 2 : SequenceNumbers.UNASSIGNED_SEQ_NO,
                forReplica && i >= startWithSeqNo && incrementTermWhenIntroducingSeqNo ? primaryTerm + 1 : primaryTerm,
                version,
                forReplica ? null : versionType,
                forReplica ? REPLICA : PRIMARY,
                System.currentTimeMillis(), SequenceNumbers.UNASSIGNED_SEQ_NO, 0);
        }
        ops.add(op);
    }
    return ops;
}
/**
 * Generates a replica history starting at sequence number zero.
 * Delegates to {@link #generateHistoryOnReplica(int, long, boolean, boolean, boolean)}.
 */
public List<Engine.Operation> generateHistoryOnReplica(int numOps, boolean allowGapInSeqNo, boolean allowDuplicate,
                                                       boolean includeNestedDocs) throws Exception {
    final long startingSeqNo = 0L;
    return generateHistoryOnReplica(numOps, startingSeqNo, allowGapInSeqNo, allowDuplicate, includeNestedDocs);
}
/**
 * Generates a randomized replica history of index/delete/no-op operations starting at
 * {@code startingSeqNo}, optionally with gaps in sequence numbers, duplicated operations
 * (same seq#), and nested documents. The returned list is shuffled, so callers must not
 * rely on execution order.
 */
public List<Engine.Operation> generateHistoryOnReplica(int numOps, long startingSeqNo, boolean allowGapInSeqNo, boolean allowDuplicate,
                                                       boolean includeNestedDocs) throws Exception {
    long seqNo = startingSeqNo;
    final int maxIdValue = randomInt(numOps * 2);
    final List<Engine.Operation> operations = new ArrayList<>(numOps);
    CheckedBiFunction<String, Integer, ParsedDocument, IOException> nestedParsedDocFactory = nestedParsedDocFactory();
    for (int i = 0; i < numOps; i++) {
        final String id = Integer.toString(randomInt(maxIdValue));
        final Engine.Operation.TYPE opType = randomFrom(Engine.Operation.TYPE.values());
        // nested docs only make sense for index operations
        final boolean isNestedDoc = includeNestedDocs && opType == Engine.Operation.TYPE.INDEX && randomBoolean();
        final int nestedValues = between(0, 3);
        final long startTime = threadPool.relativeTimeInNanos();
        // rarely emit the same seq# multiple times to simulate retries/duplicates
        final int copies = allowDuplicate && rarely() ? between(2, 4) : 1;
        for (int copy = 0; copy < copies; copy++) {
            final ParsedDocument doc = isNestedDoc ? nestedParsedDocFactory.apply(id, nestedValues) : createParsedDoc(id, null);
            switch (opType) {
                case INDEX:
                    operations.add(new Engine.Index(EngineTestCase.newUid(doc), doc, seqNo, primaryTerm.get(),
                        i, null, randomFrom(REPLICA, PEER_RECOVERY), startTime, -1, true, SequenceNumbers.UNASSIGNED_SEQ_NO, 0));
                    break;
                case DELETE:
                    operations.add(new Engine.Delete(doc.id(), EngineTestCase.newUid(doc), seqNo, primaryTerm.get(),
                        i, null, randomFrom(REPLICA, PEER_RECOVERY), startTime, SequenceNumbers.UNASSIGNED_SEQ_NO, 0));
                    break;
                case NO_OP:
                    operations.add(new Engine.NoOp(seqNo, primaryTerm.get(),
                        randomFrom(REPLICA, PEER_RECOVERY), startTime, "test-" + i));
                    break;
                default:
                    throw new IllegalStateException("Unknown operation type [" + opType + "]");
            }
        }
        seqNo++;
        // rarely skip a seq# so the history contains gaps
        if (allowGapInSeqNo && rarely()) {
            seqNo++;
        }
    }
    Randomness.shuffle(operations);
    return operations;
}
/**
 * Replays the given single-document operation history on a replica engine and asserts the
 * expected results: every op succeeds with its replayed version, only the very first op
 * creates the document, and the final visible doc count matches whether the last op was an
 * index (1 doc) or a delete (0 docs).
 *
 * @param ops           the operation history; the last op determines the expected final state
 * @param replicaEngine the engine to replay against
 * @param shuffleOps    if true, ops with seq#s are shuffled (legacy ops without seq#s stay first)
 * @param logger        used to log each op as it is applied
 */
public static void assertOpsOnReplica(
        final List<Engine.Operation> ops,
        final InternalEngine replicaEngine,
        boolean shuffleOps,
        final Logger logger) throws IOException {
    final Engine.Operation lastOp = ops.get(ops.size() - 1);
    final String lastFieldValue;
    if (lastOp instanceof Engine.Index) {
        Engine.Index index = (Engine.Index) lastOp;
        lastFieldValue = index.docs().get(0).get("value");
    } else {
        // last op is a delete, so no document should remain visible
        lastFieldValue = null;
    }
    if (shuffleOps) {
        int firstOpWithSeqNo = 0;
        while (firstOpWithSeqNo < ops.size() && ops.get(firstOpWithSeqNo).seqNo() < 0) {
            firstOpWithSeqNo++;
        }
        // shuffle ops but make sure legacy ops (no seq#) come first
        shuffle(ops.subList(0, firstOpWithSeqNo), random());
        shuffle(ops.subList(firstOpWithSeqNo, ops.size()), random());
    }
    boolean firstOp = true;
    for (Engine.Operation op : ops) {
        logger.info("performing [{}], v [{}], seq# [{}], term [{}]",
            op.operationType().name().charAt(0), op.version(), op.seqNo(), op.primaryTerm());
        if (op instanceof Engine.Index) {
            Engine.IndexResult result = replicaEngine.index((Engine.Index) op);
            // replicas don't really care about the creation status of documents:
            // this allows ignoring the case where a document was found in the live version maps in
            // a delete state and returning false for the created flag, in favor of code simplicity.
            // This check just signals regression so a decision can be made if it's
            // intentional
            assertThat(result.isCreated(), equalTo(firstOp));
            assertThat(result.getVersion(), equalTo(op.version()));
            assertThat(result.getResultType(), equalTo(Engine.Result.Type.SUCCESS));
        } else {
            Engine.DeleteResult result = replicaEngine.delete((Engine.Delete) op);
            // replicas don't really care about the found status of documents:
            // this allows ignoring the case where a document was found in the live version maps in
            // a delete state and returning true for the found flag, in favor of code simplicity.
            // This check just signals regression so a decision can be made if it's
            // intentional
            assertThat(result.isFound(), equalTo(firstOp == false));
            assertThat(result.getVersion(), equalTo(op.version()));
            assertThat(result.getResultType(), equalTo(Engine.Result.Type.SUCCESS));
        }
        // randomly refresh and/or flush between ops to exercise different engine states
        if (randomBoolean()) {
            replicaEngine.refresh("test");
        }
        if (randomBoolean()) {
            replicaEngine.flush();
            replicaEngine.refresh("test");
        }
        firstOp = false;
    }
    assertVisibleCount(replicaEngine, lastFieldValue == null ? 0 : 1);
    if (lastFieldValue != null) {
        try (Engine.Searcher searcher = replicaEngine.acquireSearcher("test")) {
            final TotalHitCountCollector collector = new TotalHitCountCollector();
            searcher.search(new TermQuery(new Term("value", lastFieldValue)), collector);
            assertThat(collector.getTotalHits(), equalTo(1));
        }
    }
}
/**
 * Applies the given operations to the engine from several concurrent threads, each claiming
 * the next operation through a shared atomic cursor. Workers periodically refresh (every
 * fourth op they apply) and rarely flush.
 */
public static void concurrentlyApplyOps(List<Engine.Operation> ops, InternalEngine engine) throws InterruptedException {
    final int threadCount = randomIntBetween(3, 5);
    final Thread[] workers = new Thread[threadCount];
    final CountDownLatch startGun = new CountDownLatch(threadCount);
    final AtomicInteger cursor = new AtomicInteger(-1); // index of the last claimed op
    for (int t = 0; t < threadCount; t++) {
        workers[t] = new Thread(() -> {
            // rendezvous so all workers begin applying ops at the same time
            startGun.countDown();
            try {
                startGun.await();
            } catch (InterruptedException e) {
                throw new AssertionError(e);
            }
            for (int idx = cursor.incrementAndGet(); idx < ops.size(); idx = cursor.incrementAndGet()) {
                try {
                    applyOperation(engine, ops.get(idx));
                    if ((idx + 1) % 4 == 0) {
                        engine.refresh("test");
                    }
                    if (rarely()) {
                        engine.flush();
                    }
                } catch (IOException e) {
                    throw new AssertionError(e);
                }
            }
        });
        workers[t].start();
    }
    for (Thread worker : workers) {
        worker.join();
    }
}
/**
 * Applies each operation to the engine in order, refreshing roughly 10% of the time and
 * rarely flushing, to exercise different engine states between operations.
 */
public static void applyOperations(Engine engine, List<Engine.Operation> operations) throws IOException {
    for (Engine.Operation op : operations) {
        applyOperation(engine, op);
        final boolean doRefresh = randomInt(100) < 10;
        if (doRefresh) {
            engine.refresh("test");
        }
        if (rarely()) {
            engine.flush();
        }
    }
}
/**
 * Dispatches a single operation to the engine based on its type.
 *
 * @return the engine result of the index, delete, or no-op
 * @throws IllegalStateException if the operation type is not one of INDEX, DELETE, NO_OP
 */
public static Engine.Result applyOperation(Engine engine, Engine.Operation operation) throws IOException {
    switch (operation.operationType()) {
        case INDEX:
            return engine.index((Engine.Index) operation);
        case DELETE:
            return engine.delete((Engine.Delete) operation);
        case NO_OP:
            return engine.noOp((Engine.NoOp) operation);
        default:
            throw new IllegalStateException("No operation defined for [" + operation + "]");
    }
}
/**
 * Gets a collection of tuples of docId, sequence number, primary term, version, and source
 * of all live root documents in the provided engine, sorted by (seq#, primary term, id).
 * Non-root (nested) documents are skipped; they are identified by a missing primary-term
 * doc value.
 *
 * @param refresh if true, the engine is refreshed first so the latest documents are visible
 */
public static List<DocIdSeqNoAndSource> getDocIds(Engine engine, boolean refresh) throws IOException {
    if (refresh) {
        engine.refresh("test_get_doc_ids");
    }
    try (Engine.Searcher searcher = engine.acquireSearcher("test_get_doc_ids", Engine.SearcherScope.INTERNAL)) {
        List<DocIdSeqNoAndSource> docs = new ArrayList<>();
        for (LeafReaderContext leafContext : searcher.getIndexReader().leaves()) {
            LeafReader reader = leafContext.reader();
            NumericDocValues seqNoDocValues = reader.getNumericDocValues(SeqNoFieldMapper.NAME);
            NumericDocValues primaryTermDocValues = reader.getNumericDocValues(SeqNoFieldMapper.PRIMARY_TERM_NAME);
            NumericDocValues versionDocValues = reader.getNumericDocValues(VersionFieldMapper.NAME);
            Bits liveDocs = reader.getLiveDocs();
            for (int i = 0; i < reader.maxDoc(); i++) {
                // null liveDocs means no deletions in this segment, so every doc is live
                if (liveDocs == null || liveDocs.get(i)) {
                    if (primaryTermDocValues.advanceExact(i) == false) {
                        // We have to skip non-root docs because its _id field is not stored (indexed only).
                        continue;
                    }
                    final long primaryTerm = primaryTermDocValues.longValue();
                    Document doc = reader.document(i, Set.of(IdFieldMapper.NAME, SourceFieldMapper.NAME));
                    BytesRef binaryID = doc.getBinaryValue(IdFieldMapper.NAME);
                    String id = Uid.decodeId(Arrays.copyOfRange(binaryID.bytes, binaryID.offset, binaryID.offset + binaryID.length));
                    final BytesRef source = doc.getBinaryValue(SourceFieldMapper.NAME);
                    if (seqNoDocValues.advanceExact(i) == false) {
                        throw new AssertionError("seqNoDocValues not found for doc[" + i + "] id[" + id + "]");
                    }
                    final long seqNo = seqNoDocValues.longValue();
                    if (versionDocValues.advanceExact(i) == false) {
                        throw new AssertionError("versionDocValues not found for doc[" + i + "] id[" + id + "]");
                    }
                    final long version = versionDocValues.longValue();
                    docs.add(new DocIdSeqNoAndSource(id, source, seqNo, primaryTerm, version));
                }
            }
        }
        // deterministic order so doc lists from different engines/replicas can be compared directly
        docs.sort(Comparator.comparingLong(DocIdSeqNoAndSource::getSeqNo)
            .thenComparingLong(DocIdSeqNoAndSource::getPrimaryTerm)
            .thenComparing((DocIdSeqNoAndSource::getId)));
        return docs;
    }
}
/**
 * Reads all engine operations that have been processed by the engine from the Lucene index.
 * The returned operations are sorted and de-duplicated, thus each sequence number will have
 * at most one operation.
 */
public static List<Translog.Operation> readAllOperationsInLucene(Engine engine) throws IOException {
    final List<Translog.Operation> result = new ArrayList<>();
    try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", 0, Long.MAX_VALUE, false, randomBoolean())) {
        for (Translog.Operation op = snapshot.next(); op != null; op = snapshot.next()) {
            result.add(op);
        }
    }
    return result;
}
/**
 * Asserts the provided engine has a consistent document history between translog and Lucene
 * index: no operation exceeds the tracked max seq#, and every translog op at or above the
 * retention floor has a matching Lucene op with the same primary term, type, and (for index
 * ops) source. Engines other than {@link InternalEngine} are skipped.
 */
public static void assertConsistentHistoryBetweenTranslogAndLuceneIndex(Engine engine) throws IOException {
    if (engine instanceof InternalEngine == false) {
        return;
    }
    final List<Translog.Operation> translogOps = new ArrayList<>();
    try (Translog.Snapshot snapshot = EngineTestCase.getTranslog(engine).newSnapshot()) {
        Translog.Operation op;
        while ((op = snapshot.next()) != null) {
            translogOps.add(op);
        }
    }
    // index Lucene ops by seq# for the per-op comparison below
    final Map<Long, Translog.Operation> luceneOps = readAllOperationsInLucene(engine).stream()
        .collect(Collectors.toMap(Translog.Operation::seqNo, Function.identity()));
    final long maxSeqNo = ((InternalEngine) engine).getLocalCheckpointTracker().getMaxSeqNo();
    for (Translog.Operation op : translogOps) {
        assertThat("translog operation [" + op + "] > max_seq_no[" + maxSeqNo + "]", op.seqNo(), lessThanOrEqualTo(maxSeqNo));
    }
    for (Translog.Operation op : luceneOps.values()) {
        assertThat("lucene operation [" + op + "] > max_seq_no[" + maxSeqNo + "]", op.seqNo(), lessThanOrEqualTo(maxSeqNo));
    }
    final long globalCheckpoint = EngineTestCase.getTranslog(engine).getLastSyncedGlobalCheckpoint();
    final long retainedOps = engine.config().getIndexSettings().getSoftDeleteRetentionOperations();
    final long minSeqNoToRetain;
    if (engine.config().getIndexSettings().isSoftDeleteEnabled()) {
        try (Engine.IndexCommitRef safeCommit = engine.acquireSafeIndexCommit()) {
            // retain everything needed to recover from the safe commit, bounded by the retention policy
            final long seqNoForRecovery = Long.parseLong(
                safeCommit.getIndexCommit().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)) + 1;
            minSeqNoToRetain = Math.min(seqNoForRecovery, globalCheckpoint + 1 - retainedOps);
        }
    } else {
        minSeqNoToRetain = engine.getMinRetainedSeqNo();
    }
    for (Translog.Operation translogOp : translogOps) {
        final Translog.Operation luceneOp = luceneOps.get(translogOp.seqNo());
        if (luceneOp == null) {
            if (minSeqNoToRetain <= translogOp.seqNo()) {
                // ops at/above the retention floor must still be present in Lucene
                fail("Operation not found seq# [" + translogOp.seqNo() + "], global checkpoint [" + globalCheckpoint + "], " +
                    "retention policy [" + retainedOps + "], maxSeqNo [" + maxSeqNo + "], translog op [" + translogOp + "]");
            } else {
                // below the retention floor the op may legitimately have been merged away
                continue;
            }
        }
        assertThat(luceneOp, notNullValue());
        assertThat(luceneOp.toString(), luceneOp.primaryTerm(), equalTo(translogOp.primaryTerm()));
        assertThat(luceneOp.opType(), equalTo(translogOp.opType()));
        if (luceneOp.opType() == Translog.Operation.Type.INDEX) {
            assertThat(luceneOp.getSource().source, equalTo(translogOp.getSource().source));
        }
    }
}
/**
 * Asserts that the max_seq_no stored in each commit's user_data is never smaller than the
 * seq_no of any document present in that commit.
 */
public static void assertMaxSeqNoInCommitUserData(Engine engine) throws Exception {
    for (IndexCommit commit : DirectoryReader.listCommits(engine.store.directory())) {
        try (DirectoryReader reader = DirectoryReader.open(commit)) {
            final long maxSeqNoInUserData = Long.parseLong(commit.getUserData().get(SequenceNumbers.MAX_SEQ_NO));
            assertThat(maxSeqNoInUserData, greaterThanOrEqualTo(maxSeqNosInReader(reader)));
        }
    }
}
/**
 * Refreshes the given engine (internal engines only) and verifies that no sequence number
 * is associated with more than one Lucene document. An engine closed concurrently is
 * silently skipped.
 */
public static void assertAtMostOneLuceneDocumentPerSequenceNumber(Engine engine) throws IOException {
    if (engine instanceof InternalEngine == false) {
        return;
    }
    try {
        engine.refresh("test");
        try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
            assertAtMostOneLuceneDocumentPerSequenceNumber(engine.config().getIndexSettings(), searcher.getDirectoryReader());
        }
    } catch (AlreadyClosedException ignored) {
        // engine was closed; nothing left to verify
    }
}
/**
 * Verifies that each sequence number maps to at most one Lucene document in the reader.
 * With soft deletes enabled the reader is wrapped so soft-deleted documents are visible too.
 * Only root documents (those carrying a primary-term doc value) are counted; nested docs
 * share the root's seq# and are skipped.
 */
public static void assertAtMostOneLuceneDocumentPerSequenceNumber(IndexSettings indexSettings,
                                                                  DirectoryReader reader) throws IOException {
    Set<Long> seqNos = new HashSet<>();
    final DirectoryReader wrappedReader = indexSettings.isSoftDeleteEnabled() ? Lucene.wrapAllDocsLive(reader) : reader;
    for (LeafReaderContext leaf : wrappedReader.leaves()) {
        NumericDocValues primaryTermDocValues = leaf.reader().getNumericDocValues(SeqNoFieldMapper.PRIMARY_TERM_NAME);
        NumericDocValues seqNoDocValues = leaf.reader().getNumericDocValues(SeqNoFieldMapper.NAME);
        int docId;
        while ((docId = seqNoDocValues.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
            // advanceExact at the current doc confirms the doc actually has a seq# value
            assertTrue(seqNoDocValues.advanceExact(docId));
            long seqNo = seqNoDocValues.longValue();
            assertThat(seqNo, greaterThanOrEqualTo(0L));
            // only root docs carry a primary term; skip nested docs
            if (primaryTermDocValues.advanceExact(docId)) {
                if (seqNos.add(seqNo) == false) {
                    IdStoredFieldLoader idLoader = new IdStoredFieldLoader(leaf.reader());
                    throw new AssertionError("found multiple documents for seq=" + seqNo + " id=" + idLoader.id(docId));
                }
            }
        }
    }
}
/**
 * Builds a {@link MapperService} for a one-shard/one-replica "test" index with an empty
 * mapping, ready for merging further test mappings.
 */
public static MapperService createMapperService() throws IOException {
    final Settings indexSettings = Settings.builder()
        .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
        .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
        .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1)
        .build();
    final IndexMetadata indexMetadata = IndexMetadata.builder("test")
        .settings(indexSettings)
        .putMapping("{\"properties\": {}}")
        .build();
    final MapperService mapperService = MapperTestUtils.newMapperService(
        new NamedXContentRegistry(ClusterModule.getNamedXWriteables()), createTempDir(), Settings.EMPTY, "test");
    mapperService.merge(indexMetadata, MapperService.MergeReason.MAPPING_UPDATE);
    return mapperService;
}
/**
 * Convenience wrapper around {@link #createMapperService()} that returns its mapping lookup,
 * rethrowing any {@link IOException} as an unchecked exception.
 */
public static MappingLookup mappingLookup() {
    final MapperService mapperService;
    try {
        mapperService = createMapperService();
    } catch (IOException e) {
        throw new UncheckedIOException(e);
    }
    return mapperService.mappingLookup();
}
/**
 * Exposes the translog associated with the given engine for testing purposes.
 * Only {@link InternalEngine} instances carry a translog.
 */
public static Translog getTranslog(Engine engine) {
    assert engine instanceof InternalEngine : "only InternalEngines have translogs, got: " + engine.getClass();
    return ((InternalEngine) engine).getTranslog();
}
/**
 * Waits for all operations up to the provided sequence number to complete in the given
 * internal engine.
 *
 * @param seqNo the sequence number the processed checkpoint must reach before this returns
 * @throws Exception if the condition is not met within the assertBusy timeout
 */
public static void waitForOpsToComplete(InternalEngine engine, long seqNo) throws Exception {
    assertBusy(() -> {
        final long processed = engine.getLocalCheckpointTracker().getProcessedCheckpoint();
        assertThat(processed, greaterThanOrEqualTo(seqNo));
    });
}
/**
 * Returns whether the given engine currently holds snapshotted commits.
 * Only {@link InternalEngine} instances track snapshotted commits.
 */
public static boolean hasSnapshottedCommits(Engine engine) {
    assert engine instanceof InternalEngine : "only InternalEngines have snapshotted commits, got: " + engine.getClass();
    return ((InternalEngine) engine).hasSnapshottedCommits();
}
/** A mutable primary-term holder usable wherever a {@link LongSupplier} is expected. */
public static final class PrimaryTermSupplier implements LongSupplier {
    private final AtomicLong term;

    PrimaryTermSupplier(long initialTerm) {
        term = new AtomicLong(initialTerm);
    }

    /** Returns the current primary term. */
    public long get() {
        return term.get();
    }

    /** Updates the primary term to {@code newTerm}. */
    public void set(long newTerm) {
        term.set(newTerm);
    }

    @Override
    public long getAsLong() {
        return term.get();
    }
}
/** Returns the highest sequence number indexed in any leaf of the given reader. */
static long maxSeqNosInReader(DirectoryReader reader) throws IOException {
    long max = SequenceNumbers.NO_OPS_PERFORMED;
    for (LeafReaderContext leaf : reader.leaves()) {
        final NumericDocValues seqNos = leaf.reader().getNumericDocValues(SeqNoFieldMapper.NAME);
        while (seqNos.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
            max = SequenceNumbers.max(max, seqNos.longValue());
        }
    }
    return max;
}
/**
 * Returns the number of times a version was looked up either from the version map or from
 * the index. Callers must pass an {@link InternalEngine}; other engines fail the cast.
 */
public static long getNumVersionLookups(Engine engine) {
    final InternalEngine internalEngine = (InternalEngine) engine;
    return internalEngine.getNumVersionLookups();
}
/** Returns the engine's in-flight document count, or zero for non-internal engines. */
public static long getInFlightDocCount(Engine engine) {
    return engine instanceof InternalEngine ? ((InternalEngine) engine).getInFlightDocCount() : 0L;
}
/** Waits until the engine reports zero in-flight documents. */
public static void assertNoInFlightDocuments(Engine engine) throws Exception {
    assertBusy(() -> {
        assertThat(getInFlightDocCount(engine), equalTo(0L));
    });
}
/**
 * A {@link FilterDirectoryReader} that restricts the visible (live) documents to those
 * matching the given query, on top of any existing deletions. Used in tests to simulate a
 * filtered view of an index.
 */
public static final class MatchingDirectoryReader extends FilterDirectoryReader {
    private final Query query;
    public MatchingDirectoryReader(DirectoryReader in, Query query) throws IOException {
        super(in, new SubReaderWrapper() {
            @Override
            public LeafReader wrap(LeafReader leaf) {
                try {
                    // evaluate the query per leaf and materialize the matches as a live-docs bitset
                    final IndexSearcher searcher = new IndexSearcher(leaf);
                    final Weight weight = searcher.createWeight(query, ScoreMode.COMPLETE_NO_SCORES, 1.0f);
                    final Scorer scorer = weight.scorer(leaf.getContext());
                    final DocIdSetIterator iterator = scorer != null ? scorer.iterator() : null;
                    final FixedBitSet liveDocs = new FixedBitSet(leaf.maxDoc());
                    if (iterator != null) {
                        for (int docId = iterator.nextDoc(); docId != DocIdSetIterator.NO_MORE_DOCS; docId = iterator.nextDoc()) {
                            // a doc stays live only if it matches AND was already live in the wrapped leaf
                            if (leaf.getLiveDocs() == null || leaf.getLiveDocs().get(docId)) {
                                liveDocs.set(docId);
                            }
                        }
                    }
                    return new FilterLeafReader(leaf) {
                        @Override
                        public Bits getLiveDocs() {
                            return liveDocs;
                        }
                        @Override
                        public CacheHelper getCoreCacheHelper() {
                            return leaf.getCoreCacheHelper();
                        }
                        @Override
                        public CacheHelper getReaderCacheHelper() {
                            return null; // liveDocs were modified, so the reader-level cache key is invalid
                        }
                    };
                } catch (IOException e) {
                    // SubReaderWrapper.wrap cannot throw checked exceptions
                    throw new UncheckedIOException(e);
                }
            }
        });
        this.query = query;
    }
    @Override
    protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) throws IOException {
        return new MatchingDirectoryReader(in, query);
    }
    @Override
    public CacheHelper getReaderCacheHelper() {
        // TODO: We should not return the ReaderCacheHelper if we modify the liveDocs,
        // but some caching components (e.g., global ordinals) require this cache key.
        return in.getReaderCacheHelper();
    }
}
/**
 * Randomly returns either an identity reader wrapper or one wrapping the reader in a
 * {@link MatchingDirectoryReader} that matches all documents.
 */
public static CheckedFunction<DirectoryReader, DirectoryReader, IOException> randomReaderWrapper() {
    return randomBoolean()
        ? reader -> reader
        : reader -> new MatchingDirectoryReader(reader, new MatchAllDocsQuery());
}
/**
 * Randomly returns either the identity searcher wrapper or one that applies a random
 * reader wrapper from {@link #randomReaderWrapper()}.
 */
public static Function<Engine.Searcher, Engine.Searcher> randomSearcherWrapper() {
    if (randomBoolean() == false) {
        final CheckedFunction<DirectoryReader, DirectoryReader, IOException> readerWrapper = randomReaderWrapper();
        return searcher -> SearcherHelper.wrapSearcher(searcher, readerWrapper);
    }
    return Function.identity();
}
/**
 * Verifies that a lazily-soft-deletes read-only engine has not yet materialized its
 * soft-deletes live bits. Throws if lazy loading is disabled or if any leaf's bits are
 * already initialized.
 */
public static void checkNoSoftDeletesLoaded(ReadOnlyEngine readOnlyEngine) {
    if (readOnlyEngine.lazilyLoadSoftDeletes == false) {
        throw new IllegalStateException("method should only be called when lazily loading soft-deletes is enabled");
    }
    try (Engine.Searcher searcher = readOnlyEngine.acquireSearcher("soft-deletes-check", Engine.SearcherScope.INTERNAL)) {
        for (LeafReaderContext ctx : searcher.getIndexReader().getContext().leaves()) {
            final LazySoftDeletesDirectoryReaderWrapper.LazyBits bits = lazyBits(ctx.reader());
            final boolean loaded = bits != null && bits.initialized();
            if (loaded) {
                throw new IllegalStateException("soft-deletes loaded");
            }
        }
    }
}
/**
 * Extracts the lazy soft-deletes live bits from the given leaf reader, recursively
 * unwrapping filter readers. Returns {@code null} for a plain segment reader and throws
 * for unrecognized reader types. The order of the checks mirrors the wrapper hierarchy
 * and must be preserved.
 */
@Nullable
private static LazySoftDeletesDirectoryReaderWrapper.LazyBits lazyBits(LeafReader reader) {
    if (reader instanceof LazySoftDeletesDirectoryReaderWrapper.LazySoftDeletesFilterLeafReader) {
        return ((LazySoftDeletesDirectoryReaderWrapper.LazySoftDeletesFilterLeafReader) reader).getLiveDocs();
    }
    if (reader instanceof LazySoftDeletesDirectoryReaderWrapper.LazySoftDeletesFilterCodecReader) {
        return ((LazySoftDeletesDirectoryReaderWrapper.LazySoftDeletesFilterCodecReader) reader).getLiveDocs();
    }
    if (reader instanceof FilterLeafReader) {
        return lazyBits(FilterLeafReader.unwrap((FilterLeafReader) reader));
    }
    if (reader instanceof FilterCodecReader) {
        return lazyBits(FilterCodecReader.unwrap((FilterCodecReader) reader));
    }
    if (reader instanceof SegmentReader) {
        return null;
    }
    // hard fail - we can't get the lazy bits from this reader type
    throw new IllegalStateException("Can not extract lazy bits from given index reader [" + reader + "]");
}
}
| |
/*
* Copyright (C) 2008 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.sdklib.internal.build;
import com.android.sdklib.internal.build.SignedJarBuilder.IZipEntryFilter.ZipAbortException;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.security.GeneralSecurityException;
import java.security.NoSuchAlgorithmException;
import java.util.jar.JarEntry;
import java.util.jar.JarOutputStream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
/*<bch>
import java.io.FilterOutputStream;
import java.security.Signature;
import java.security.SignatureException;
import sun.misc.BASE64Encoder;
import sun.security.pkcs.ContentInfo;
import sun.security.pkcs.PKCS7;
import sun.security.pkcs.SignerInfo;
import sun.security.x509.AlgorithmId;
import sun.security.x509.X500Name;
import java.io.ByteArrayOutputStream;
</bch>*/
/*<bch>import java.io.FileInputStream;
import java.io.FilterOutputStream;
import java.io.IOException;</bch>*/
/*<bch>import java.io.PrintStream;
import java.security.DigestOutputStream;
import java.security.GeneralSecurityException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.security.PrivateKey;</bch>*/
/*<bch>import java.security.SignatureException;
import java.security.cert.X509Certificate;
import java.util.Map;
import java.util.jar.Attributes;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import java.util.jar.JarOutputStream;
import java.util.jar.Manifest;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;</bch>*/
/**
* A Jar file builder with signature support.
*/
public class SignedJarBuilder {
    /*<bch>
    private static final String DIGEST_ALGORITHM = "SHA1";
    private static final String DIGEST_ATTR = "SHA1-Digest";
    private static final String DIGEST_MANIFEST_ATTR = "SHA1-Digest-Manifest";
    </bch>*/
    /** Write to another stream and also feed it to the Signature object. */
    /*<bch>private static class SignatureOutputStream extends FilterOutputStream {
    private Signature mSignature;
    private int mCount = 0;
    public SignatureOutputStream(OutputStream out, Signature sig) {
        super(out);
        mSignature = sig;
    }
    @Override
    public void write(int b) throws IOException {
        try {
            mSignature.update((byte) b);
        } catch (SignatureException e) {
            throw new IOException("SignatureException: " + e);
        }
        super.write(b);
        mCount++;
    }
    @Override
    public void write(byte[] b, int off, int len) throws IOException {
        try {
            mSignature.update(b, off, len);
        } catch (SignatureException e) {
            throw new IOException("SignatureException: " + e);
        }
        super.write(b, off, len);
        mCount += len;
    }
    public int size() {
        return mCount;
    }
    }
    </bch>*/
    // The archive being built. NOTE(review): signing support is stubbed out via the
    // <bch> comment markers throughout this class, so the builder currently writes an
    // unsigned jar only.
    private JarOutputStream mOutputJar;
    //<bch>private PrivateKey mKey;</bch>
    //<bch>private X509Certificate mCertificate;</bch>
    //<bch>private Manifest mManifest;</bch>
    //<bch>private BASE64Encoder mBase64Encoder;</bch>
    //<bch>private MessageDigest mMessageDigest;</bch>
    // Reusable copy buffer for writeEntry(); makes this class non-thread-safe.
    private byte[] mBuffer = new byte[4096];
    /**
     * Classes which implement this interface provides a method to check whether a file should
     * be added to a Jar file.
     */
    public interface IZipEntryFilter {
        /**
         * An exception thrown during packaging of a zip file into APK file.
         * This is typically thrown by implementations of
         * {@link IZipEntryFilter#checkEntry(String)}.
         */
        public static class ZipAbortException extends Exception {
            private static final long serialVersionUID = 1L;
            public ZipAbortException() {
                super();
            }
            public ZipAbortException(String format, Object... args) {
                super(String.format(format, args));
            }
            public ZipAbortException(Throwable cause, String format, Object... args) {
                super(String.format(format, args), cause);
            }
            public ZipAbortException(Throwable cause) {
                super(cause);
            }
        }
        /**
         * Checks a file for inclusion in a Jar archive.
         * @param archivePath the archive file path of the entry
         * @return <code>true</code> if the file should be included.
         * @throws ZipAbortException if writing the file should be aborted.
         */
        public boolean checkEntry(String archivePath) throws ZipAbortException;
    }
    /**
     * Creates a {@link SignedJarBuilder} with a given output stream, and signing information.
     * <p/>If either <code>key</code> or <code>certificate</code> is <code>null</code> then
     * the archive will not be signed.
     * <p/>NOTE(review): with the signing code commented out (<bch> markers), the
     * {@code key} and {@code certificate} arguments are currently ignored and the
     * archive is never signed.
     * @param out the {@link OutputStream} where to write the Jar archive.
     * <code>null</code>.
     * @throws IOException
     * @throws NoSuchAlgorithmException
     */
    public SignedJarBuilder(OutputStream out , /*<bch>PrivateKey*/Object key, /*X509Certificate*/Object certificate/*</bch>*/)
            throws IOException, NoSuchAlgorithmException {
        mOutputJar = new JarOutputStream(out);
        // maximum compression
        mOutputJar.setLevel(9);
        /*<bch>mKey = key;
        mCertificate = certificate;
        if (mKey != null && mCertificate != null) {
            mManifest = new Manifest();
            Attributes main = mManifest.getMainAttributes();
            main.putValue("Manifest-Version", "1.0");
            main.putValue("Created-By", "1.0 (Android)");
            mBase64Encoder = new BASE64Encoder();
            mMessageDigest = MessageDigest.getInstance(DIGEST_ALGORITHM);
        }</bch>*/
    }
    /**
     * Writes a new {@link File} into the archive.
     * @param inputFile the {@link File} to write.
     * @param jarPath the filepath inside the archive.
     * @throws IOException
     */
    public void writeFile(File inputFile, String jarPath) throws IOException {
        // Get an input stream on the file.
        FileInputStream fis = new FileInputStream(inputFile);
        try {
            // create the zip entry, preserving the file's timestamp
            JarEntry entry = new JarEntry(jarPath);
            entry.setTime(inputFile.lastModified());
            writeEntry(fis, entry);
        } finally {
            // close the file stream used to read the file
            fis.close();
        }
    }
    /**
     * Copies the content of a Jar/Zip archive into the receiver archive.
     * <p/>An optional {@link IZipEntryFilter} allows to selectively choose which files
     * to copy over.
     * @param input the {@link InputStream} for the Jar/Zip to copy.
     * @param filter the filter or <code>null</code>
     * @throws IOException
     * @throws ZipAbortException if the {@link IZipEntryFilter} filter indicated that the write
     *                           must be aborted.
     */
    public void writeZip(InputStream input, IZipEntryFilter filter)
            throws IOException, ZipAbortException {
        ZipInputStream zis = new ZipInputStream(input);
        try {
            // loop on the entries of the intermediary package and put them in the final package.
            ZipEntry entry;
            while ((entry = zis.getNextEntry()) != null) {
                String name = entry.getName();
                // do not take directories or anything inside a potential META-INF folder.
                if (entry.isDirectory() || name.startsWith("META-INF/")) {
                    continue;
                }
                // if we have a filter, we check the entry against it
                if (filter != null && filter.checkEntry(name) == false) {
                    continue;
                }
                JarEntry newEntry;
                // Preserve the STORED method of the input entry.
                if (entry.getMethod() == JarEntry.STORED) {
                    newEntry = new JarEntry(entry);
                } else {
                    // Create a new entry so that the compressed len is recomputed.
                    newEntry = new JarEntry(name);
                }
                writeEntry(zis, newEntry);
                zis.closeEntry();
            }
        } finally {
            zis.close();
        }
    }
    /**
     * Closes the Jar archive by creating the manifest, and signing the archive.
     * NOTE(review): with signing stubbed out this currently only closes the stream.
     * @throws IOException
     * @throws GeneralSecurityException
     */
    public void close() throws IOException, GeneralSecurityException {
        /*<bch>if (mManifest != null) {
            // write the manifest to the jar file
            mOutputJar.putNextEntry(new JarEntry(JarFile.MANIFEST_NAME));
            mManifest.write(mOutputJar);
            // CERT.SF
            Signature signature = Signature.getInstance("SHA1with" + mKey.getAlgorithm());
            signature.initSign(mKey);
            mOutputJar.putNextEntry(new JarEntry("META-INF/CERT.SF"));
            writeSignatureFile(new SignatureOutputStream(mOutputJar, signature));
            // CERT.*
            mOutputJar.putNextEntry(new JarEntry("META-INF/CERT." + mKey.getAlgorithm()));
            writeSignatureBlock(signature, mCertificate, mKey);
        }</bch>*/
        mOutputJar.close();
    }
    /**
     * Adds an entry to the output jar, and write its content from the {@link InputStream}
     * @param input The input stream from where to write the entry content.
     * @param entry the entry to write in the jar.
     * @throws IOException
     */
    private void writeEntry(InputStream input, JarEntry entry) throws IOException {
        // add the entry to the jar archive
        mOutputJar.putNextEntry(entry);
        // read the content of the entry from the input stream, and write it into the archive.
        int count;
        while ((count = input.read(mBuffer)) != -1) {
            mOutputJar.write(mBuffer, 0, count);
            /*<bch>
            // update the digest
            if (mMessageDigest != null) {
                mMessageDigest.update(mBuffer, 0, count);
            }</bch>*/
        }
        // close the entry for this file
        mOutputJar.closeEntry();
        /*<bch>
        if (mManifest != null) {
            // update the manifest for this entry.
            Attributes attr = mManifest.getAttributes(entry.getName());
            if (attr == null) {
                attr = new Attributes();
                mManifest.getEntries().put(entry.getName(), attr);
            }
            attr.putValue(DIGEST_ATTR, mBase64Encoder.encode(mMessageDigest.digest()));
        }</bch>*/
    }
    /** Writes a .SF file with a digest to the manifest. */
    /*<bch>private void writeSignatureFile(SignatureOutputStream out)
            throws IOException, GeneralSecurityException {
        Manifest sf = new Manifest();
        Attributes main = sf.getMainAttributes();
        main.putValue("Signature-Version", "1.0");
        main.putValue("Created-By", "1.0 (Android)");
        BASE64Encoder base64 = new BASE64Encoder();
        MessageDigest md = MessageDigest.getInstance(DIGEST_ALGORITHM);
        PrintStream print = new PrintStream(
                new DigestOutputStream(new ByteArrayOutputStream(), md),
                true, "UTF-8");
        // Digest of the entire manifest
        mManifest.write(print);
        print.flush();
        main.putValue(DIGEST_MANIFEST_ATTR, base64.encode(md.digest()));
        Map<String, Attributes> entries = mManifest.getEntries();
        for (Map.Entry<String, Attributes> entry : entries.entrySet()) {
            // Digest of the manifest stanza for this entry.
            print.print("Name: " + entry.getKey() + "\r\n");
            for (Map.Entry<Object, Object> att : entry.getValue().entrySet()) {
                print.print(att.getKey() + ": " + att.getValue() + "\r\n");
            }
            print.print("\r\n");
            print.flush();
            Attributes sfAttr = new Attributes();
            sfAttr.putValue(DIGEST_ATTR, base64.encode(md.digest()));
            sf.getEntries().put(entry.getKey(), sfAttr);
        }
        sf.write(out);
        // A bug in the java.util.jar implementation of Android platforms
        // up to version 1.6 will cause a spurious IOException to be thrown
        // if the length of the signature file is a multiple of 1024 bytes.
        // As a workaround, add an extra CRLF in this case.
        if ((out.size() % 1024) == 0) {
            out.write('\r');
            out.write('\n');
        }
    }</bch>*/
    /** Write the certificate file with a digital signature. */
    /*<bch>private void writeSignatureBlock(Signature signature, X509Certificate publicKey,
            PrivateKey privateKey)
            throws IOException, GeneralSecurityException {
        SignerInfo signerInfo = new SignerInfo(
                new X500Name(publicKey.getIssuerX500Principal().getName()),
                publicKey.getSerialNumber(),
                AlgorithmId.get(DIGEST_ALGORITHM),
                AlgorithmId.get(privateKey.getAlgorithm()),
                signature.sign());
        PKCS7 pkcs7 = new PKCS7(
                new AlgorithmId[] { AlgorithmId.get(DIGEST_ALGORITHM) },
                new ContentInfo(ContentInfo.DATA_OID, null),
                new X509Certificate[] { publicKey },
                new SignerInfo[] { signerInfo });
        pkcs7.encodeSignedData(mOutputJar);
    }</bch>*/
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.cache.query.internal.index;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.util.Collection;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.apache.geode.cache.AttributesFactory;
import org.apache.geode.cache.PartitionAttributesFactory;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.query.Index;
import org.apache.geode.cache.query.IndexType;
import org.apache.geode.cache.query.Query;
import org.apache.geode.cache.query.QueryService;
import org.apache.geode.cache.query.SelectResults;
import org.apache.geode.cache.query.data.Portfolio;
import org.apache.geode.cache.query.internal.DefaultQuery;
import org.apache.geode.cache.query.internal.IndexTrackingQueryObserver;
import org.apache.geode.cache.query.internal.IndexTrackingQueryObserver.IndexInfo;
import org.apache.geode.cache.query.internal.QueryObserver;
import org.apache.geode.cache.query.internal.QueryObserverHolder;
import org.apache.geode.test.dunit.Assert;
import org.apache.geode.test.dunit.AsyncInvocation;
import org.apache.geode.test.dunit.Host;
import org.apache.geode.test.dunit.LogWriterUtils;
import org.apache.geode.test.dunit.SerializableRunnable;
import org.apache.geode.test.dunit.VM;
import org.apache.geode.test.dunit.Wait;
import org.apache.geode.test.dunit.WaitCriterion;
import org.apache.geode.test.dunit.cache.internal.JUnit4CacheTestCase;
import org.apache.geode.test.junit.categories.OQLIndexTest;
@Category({OQLIndexTest.class})
public class IndexTrackingQueryObserverDUnitTest extends JUnit4CacheTestCase {

  /** Number of buckets configured on the partitioned region under test. */
  private final int NUM_BKTS = 10;

  /** OQL query that matches every Portfolio entry in the region. */
  private static final String queryStr = "select * from /portfolio where ID >= 0";

  /** Number of Portfolio objects put into the region. */
  protected static final int TOTAL_OBJECTS = 1000;

  public IndexTrackingQueryObserverDUnitTest() {
    super();
  }

  /**
   * Verifies that index-usage ("query verbose") information is collected on both the local and
   * the remote member when a query runs against a partitioned region. Each of the two members
   * hosts half the buckets, so each is expected to report TOTAL_OBJECTS / 2 results.
   */
  @Ignore("Disabled for bug 52321")
  @Test
  public void testIndexInfoOnRemotePartitionedRegion() throws Exception {
    final Host host = Host.getHost(0);
    VM ds0 = host.getVM(0);
    VM ds1 = host.getVM(1);

    // Query verbose must be enabled on both members so the observer records index info.
    ds0.invoke(new SerializableRunnable("Set system property") {
      public void run() {
        DefaultQuery.QUERY_VERBOSE = true;
      }
    });
    ds1.invoke(new SerializableRunnable("Set system property") {
      public void run() {
        DefaultQuery.QUERY_VERBOSE = true;
      }
    });

    createPR(ds0);
    createPR(ds1);
    createQueryIndex(ds0, true);
    createQueryIndex(ds1, false);

    // Populate region.
    initializeRegion(ds0);

    // Check query verbose on both VMs; each member hosts half the buckets.
    AsyncInvocation async1 = verifyQueryVerboseData(ds0, TOTAL_OBJECTS / 2);
    AsyncInvocation async2 = verifyQueryVerboseData(ds1, TOTAL_OBJECTS / 2);

    // Run query on one vm only.
    runQuery(ds1);

    async1.join();
    async2.join();

    // Reset observer and verbose flag on both members before asserting.
    ds0.invoke(new SerializableRunnable("Test Query Verbose Data") {
      public void run() {
        // Reset the observer.
        QueryObserverHolder.reset();
        // Reset System Property
        DefaultQuery.QUERY_VERBOSE = false;
      }
    });
    ds1.invoke(new SerializableRunnable("Test Query Verbose Data") {
      public void run() {
        // Reset the observer.
        QueryObserverHolder.reset();
        // Reset System Property
        DefaultQuery.QUERY_VERBOSE = false;
      }
    });

    if (async1.exceptionOccurred()) {
      Assert.fail("", async1.getException());
    }
    // BUG FIX: this previously re-checked async1, so a failure inside the
    // verification running on ds1 (async2) was silently ignored.
    if (async2.exceptionOccurred()) {
      Assert.fail("", async2.getException());
    }
  }

  /**
   * Creates a PR on a VM with NUM_BKTS buckets and installs an
   * {@link IndexTrackingQueryObserver} as the query observer on that member.
   */
  private void createPR(VM vm) {
    SerializableRunnable createDS = new SerializableRunnable("Creating PR Datastore") {
      public void run() {
        QueryObserverHolder.setInstance(new IndexTrackingQueryObserver());
        // Create Partition Region
        PartitionAttributesFactory paf = new PartitionAttributesFactory();
        paf.setTotalNumBuckets(NUM_BKTS);
        AttributesFactory af = new AttributesFactory();
        af.setPartitionAttributes(paf.create());
        getCache().createRegion("portfolio", af.create());
      }
    };
    vm.invoke(createDS);
  }

  /**
   * Populates the "portfolio" region on the given VM with TOTAL_OBJECTS entries
   * (idempotent: skips the puts if the region is already populated).
   */
  private void initializeRegion(VM vm) {
    SerializableRunnable initRegion = new SerializableRunnable("Initialize the PR") {
      public void run() {
        Region region = getCache().getRegion("portfolio");
        if (region.size() == 0) {
          for (int i = 0; i < TOTAL_OBJECTS; i++) {
            region.put(Integer.toString(i), new Portfolio(i, i));
          }
        }
        assertEquals(TOTAL_OBJECTS, region.size());
      }
    };
    vm.invoke(initRegion);
  }

  /**
   * Optionally creates a functional index on the PR (only when {@code create} is true; the
   * second member picks the index up from the first), then injects an
   * {@link IndexTrackingTestHook} into the query observer so results can be verified later.
   */
  private void createQueryIndex(VM vm, final boolean create) {
    SerializableRunnable createIndex = new SerializableRunnable("Create index on PR") {
      public void run() {
        // Query VERBOSE has to be true for the test
        assertTrue(DefaultQuery.QUERY_VERBOSE);
        QueryService qs = getCache().getQueryService();
        Index keyIndex1 = null;
        try {
          if (create) {
            keyIndex1 = (IndexProtocol) qs.createIndex(IndexTrackingTestHook.INDEX_NAME,
                IndexType.FUNCTIONAL, "ID",
                "/portfolio ");
            assertNotNull(keyIndex1);
            assertTrue(keyIndex1 instanceof PartitionedIndex);
          }
        } catch (Exception e) {
          Assert.fail("While creating Index on PR", e);
        }
        Region region = getCache().getRegion("portfolio");
        // Inject TestHook in QueryObserver before running query.
        IndexTrackingTestHook th = new IndexTrackingTestHook(region, NUM_BKTS / 2);
        QueryObserver observer = QueryObserverHolder.getInstance();
        assertTrue(QueryObserverHolder.hasObserver());
        ((IndexTrackingQueryObserver) observer).setTestHook(th);
      }
    };
    vm.invoke(createIndex);
  }

  /**
   * Executes {@link #queryStr} on the given VM and asserts the query returns every entry
   * in the region.
   */
  private void runQuery(VM vm) {
    SerializableRunnable runQuery = new SerializableRunnable("Run Query on PR") {
      public void run() {
        QueryService qs = getCache().getQueryService();
        Query query = qs.newQuery(queryStr);
        Region region = getCache().getRegion("portfolio");
        SelectResults results = null;
        try {
          results = (SelectResults) query.execute();
        } catch (Exception e) {
          Assert.fail("While running query on PR", e);
        }
        // The query should return all elements in region.
        assertEquals(region.size(), results.size());
      }
    };
    vm.invoke(runQuery);
  }

  /**
   * Asynchronously waits (up to 60s) for the {@link IndexTrackingTestHook} on the given VM to
   * observe query results, then asserts that the summed per-bucket result counts recorded for
   * the index equal {@code results}. Returns the AsyncInvocation so the caller can join and
   * check for exceptions.
   */
  private AsyncInvocation verifyQueryVerboseData(VM vm, final int results) {
    SerializableRunnable testQueryVerbose = new SerializableRunnable("Test Query Verbose Data") {
      public void run() {
        // Query VERBOSE has to be true for the test
        assertTrue(DefaultQuery.QUERY_VERBOSE);
        // Get TestHook from observer.
        QueryObserver observer = QueryObserverHolder.getInstance();
        assertTrue(QueryObserverHolder.hasObserver());
        final IndexTrackingTestHook th =
            (IndexTrackingTestHook) ((IndexTrackingQueryObserver) observer).getTestHook();
        Wait.waitForCriterion(new WaitCriterion() {
          public boolean done() {
            if (th.getRegionMap() != null) {
              return th.getRegionMap().getResults() != null;
            }
            return false;
          }

          public String description() {
            return null;
          }
        }, 60 * 1000, 200, true);

        IndexInfo regionMap = th.getRegionMap();
        Collection<Integer> rslts = regionMap.getResults().values();
        int totalResults = 0;
        for (Integer i : rslts) {
          totalResults += i.intValue();
        }
        LogWriterUtils.getLogWriter().fine("Index Info result size is " + totalResults);
        assertEquals(results, totalResults);
      }
    };
    return vm.invokeAsync(testQueryVerbose);
  }
}
| |
/* Copyright (C) 2005-2011 Fabio Riccardi */
package com.lightcrafts.ui.operation;
import java.awt.event.*;
import java.awt.geom.Point2D;
import java.awt.*;
import java.beans.PropertyChangeSupport;
import java.util.Enumeration;
import javax.swing.*;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import javax.swing.undo.AbstractUndoableEdit;
import com.lightcrafts.model.ColorSelection;
import com.lightcrafts.model.Operation;
import com.lightcrafts.model.RGBColorSelection;
import com.lightcrafts.model.RGBColorSelectionPreset;
import com.lightcrafts.ui.LightZoneSkin;
import com.lightcrafts.ui.editor.EditorMode;
import com.lightcrafts.ui.toolkit.LCSliderUI;
import com.lightcrafts.ui.toolkit.DropperButton;
import com.lightcrafts.ui.mode.DropperMode;
import com.lightcrafts.ui.swing.*;
import com.lightcrafts.utils.xml.XMLException;
import com.lightcrafts.utils.xml.XmlNode;
import com.lightcrafts.utils.LCMS;
import static com.lightcrafts.ui.operation.Locale.LOCALE;
import com.lightcrafts.jai.JAIContext;
import com.lightcrafts.app.ComboFrame;
import org.jvnet.substance.SubstanceLookAndFeel;
import org.jvnet.substance.theme.SubstanceTheme;
import org.jvnet.substance.color.ColorScheme;
/**
* A <code>ColorSelectionControls</code> is-a {@link Box} that contains all
* the controls to do color-based selection.
*/
final class ColorSelectionControls extends Box {

    // Fires COLOR_SELECTION property changes when the selection toggles
    // between "everything selected" and a narrower selection; supplied by
    // the owning OpControl via the constructor.
    private final PropertyChangeSupport pcs;

    public static final String COLOR_SELECTION = "Color Selection";

    // Shared color transform from the linear working profile to the system
    // display profile; used to render the preset swatch colors on screen.
    private static LCMS.Transform ts = new LCMS.Transform(
        new LCMS.Profile( JAIContext.linearProfile ), LCMS.TYPE_RGB_8,
        new LCMS.Profile( JAIContext.systemProfile ), LCMS.TYPE_RGB_8,
        LCMS.INTENT_PERCEPTUAL, 0
    );

    /**
     * Returns the currently selected radio button of the given group, or
     * <code>null</code> if no button in the group is selected.
     */
    public static JRadioButton getSelection( ButtonGroup group ) {
        for ( Enumeration e = group.getElements(); e.hasMoreElements(); ) {
            final JRadioButton b = (JRadioButton)e.nextElement();
            if ( b.getModel() == group.getSelection() ) {
                return b;
            }
        }
        return null;
    }

    /**
     * A horizontal row of radio buttons, one per {@link RGBColorSelectionPreset}
     * (except SampledColors, which has no button), used to pick a predefined
     * color selection.
     */
    private final class ColorPresets extends Box {

        /** A radio button bound to a single color-selection preset. */
        private final class ColorButton extends JRadioButton {
            final RGBColorSelectionPreset m_preset;
            ColorButton( RGBColorSelectionPreset preset ) {
                m_preset = preset;
                setFocusable( false );
            }
        }

        ButtonGroup group = new ButtonGroup();

        ColorPresets() {
            super(BoxLayout.X_AXIS);
            for ( final RGBColorSelectionPreset p : RGBColorSelectionPreset.values() ) {
                if (!p.equals(RGBColorSelectionPreset.SampledColors)) {
                    final ColorButton button = new ColorButton(p);
                    group.add(button);
                    add(button);
                    if (p.equals(RGBColorSelectionPreset.AllColors)) {
                        // "All colors" gets a text label instead of a theme
                        // color and is the initially-selected preset.
                        button.setText( LOCALE.get( "AllLabel" ) );
                        button.setSelected(true);
                        button.setBorder(BorderFactory.createEmptyBorder(1, 1, 1, 3));
                    } else {
                        // Convert the preset's linear RGB to the display
                        // profile and theme the button with that color.
                        final RGBColorSelection cs =
                            new RGBColorSelection(p, false);
                        final byte[] systemColor = new byte[3];
                        ts.doTransform(
                            new byte[]{
                                (byte)(0xff * cs.red),
                                (byte)(0xff * cs.green),
                                (byte)(0xff * cs.blue)
                            },
                            systemColor
                        );
                        final Color color = new Color(0xff & systemColor[0],
                                0xff & systemColor[1],
                                0xff & systemColor[2]);
                        final ColorScheme colorScheme = new LightZoneSkin.CustomColorScheme(color);
                        final SubstanceTheme theme = LightZoneSkin.makeTheme(colorScheme, p.name());
                        button.putClientProperty(SubstanceLookAndFeel.THEME_PROPERTY, theme);
                        button.putClientProperty(SubstanceLookAndFeel.PAINT_ACTIVE_PROPERTY, Boolean.TRUE);
                        button.setBorder(BorderFactory.createEmptyBorder(1, 2, 1, 3));
                    }
                    button.addItemListener(
                        new ItemListener() {
                            public void itemStateChanged( ItemEvent ie ) {
                                if ( ie.getStateChange() == ItemEvent.SELECTED ) {
                                    final ColorButton b =
                                        (ColorButton)ie.getItem();
                                    selectPreset( b.m_preset );
                                }
                            }
                        }
                    );
                }
            }
            add(Box.createHorizontalGlue());
        }

        /**
         * Returns the preset of the selected button, or SampledColors when no
         * button is selected (SampledColors has no button of its own).
         */
        RGBColorSelectionPreset getSelectedItem() {
            final ColorButton selection = (ColorButton) getSelection(group);
            if (selection == null)
                return RGBColorSelectionPreset.SampledColors;
            return selection.m_preset;
        }

        /**
         * Selects the button matching the given preset. For SampledColors the
         * current selection is cleared instead: the selected button is briefly
         * removed from the group so it can be deselected (a ButtonGroup will
         * not otherwise allow zero selected buttons).
         */
        void setSelectedItem( RGBColorSelectionPreset p ) {
            if ( !p.equals( RGBColorSelectionPreset.SampledColors ) ) {
                final Enumeration e = group.getElements();
                while ( e.hasMoreElements() ) {
                    final ColorButton b = (ColorButton)e.nextElement();
                    if ( b.m_preset.equals( p ) ) {
                        b.setSelected( true );
                        break;
                    }
                }
            } else {
                final ColorButton selection = (ColorButton)getSelection( group );
                if ( selection != null ) {
                    group.remove( selection );
                    selection.setSelected( false );
                    group.add( selection );
                }
            }
        }
    }

    /**
     * Builds the full color-selection panel for the given tool control:
     * hue/luminosity enable checkboxes, color swatch, preset buttons, color
     * range slider, luminosity range selector, dropper, invert checkbox and
     * reset button, laid out in nested boxes.
     */
    ColorSelectionControls( OpControl control, PropertyChangeSupport pcs ) {
        super(BoxLayout.X_AXIS);
        this.pcs = pcs;
        m_op = control.getOperation();
        m_undoSupport = control.undoSupport;
        m_hueEnabled = new JCheckBox();
        m_hueEnabled.setFocusable( false );
        m_hueEnabled.setSelected( true );
        m_hueEnabled.setToolTipText(
            LOCALE.get( "HueDisableToolTip" )
        );
        m_hueEnabled.addItemListener(
            new EnablerListener(
                LOCALE.get( "HueDisableToolTip" ),
                LOCALE.get( "HueEnableToolTip" )
            )
        );
        m_luminosityEnabled = new JCheckBox();
        m_luminosityEnabled.setFocusable( false );
        m_luminosityEnabled.setSelected( true );
        m_luminosityEnabled.setToolTipText(
            LOCALE.get( "BrightnessDisableToolTip" )
        );
        m_luminosityEnabled.addItemListener(
            new EnablerListener(
                LOCALE.get( "BrightnessDisableToolTip" ),
                LOCALE.get( "BrightnessEnableToolTip" )
            )
        );
        final LocalListener localListener = new LocalListener();
        m_colorSwatch = new ColorSwatch( COLOR_SWATCH_DEFAULT );
        final JLabel colorRadiusLabel =
            new JLabel( LOCALE.get( "ColorRangeLabel" ) + ':' );
        colorRadiusLabel.setBackground( OpControl.Background );
        colorRadiusLabel.setFocusable( false );
        colorRadiusLabel.setFont( LightZoneSkin.fontSet.getSmallFont() );
        m_colorRangeSlider = new JSlider(
            SLIDER_RADIUS_MIN, SLIDER_RADIUS_MAX,
            COLOR_RADIUS_DEFAULT
        );
        m_colorRangeSlider.setBackground( OpControl.Background );
        m_colorRangeSlider.setFocusable( false );
        m_colorRangeSlider.setUI( new LCSliderUI( m_colorRangeSlider ) );
        m_colorRangeSlider.setToolTipText( LOCALE.get( "ColorRangeToolTip" ) );
        m_colorRangeSlider.addChangeListener( localListener );
        m_colorRangeSlider.addMouseListener( localListener );
        m_colorPresets = new ColorPresets();
        m_luminositySelector = new RangeSelector(
            SELECTOR_LUMINOSITY_MIN, SELECTOR_LUMINOSITY_MAX
        );
        m_luminositySelector.setTrack( new RangeSelectorZoneTrack() );
        m_luminositySelector.addChangeListener( localListener );
        m_luminositySelector.addMouseListener( localListener );
        m_luminositySelector.setToolTipText(
            LOCALE.get( "BrightnessSelectorToolTip" )
        );
        initDropper( control );
        m_invertSelection = new JCheckBox(
            LOCALE.get( "InvertColorSelectionLabel" )
        );
        m_invertSelection.setFont(LightZoneSkin.fontSet.getSmallFont());
        m_invertSelection.setFocusable( false );
        m_invertSelection.setToolTipText(
            LOCALE.get( "InvertColorSelectionEnableToolTip" )
        );
        m_invertSelection.addItemListener(
            new EnablerListener(
                LOCALE.get( "InvertColorSelectionDisableToolTip" ),
                LOCALE.get( "InvertColorSelectionEnableToolTip" ),
                LOCALE.get( "InvertColorSelectionEditName" ),
                LOCALE.get( "NormalColorSelectionEditName" )
            )
        );
        final ResetColorSelectionButton resetButton =
            new ResetColorSelectionButton(
                new ActionListener() {
                    public void actionPerformed( ActionEvent ae ) {
                        resetColorSelection();
                    }
                }
            );
        setBackground( LightZoneSkin.Colors.ToolPanesBackground );
        ////////// Color controls /////////////////////////////////////////////
        final JPanel presets = new JPanel();
        presets.setBackground( LightZoneSkin.Colors.ToolPanesBackground );
        presets.setLayout(
            new BoxLayout( presets, BoxLayout.X_AXIS )
        );
        presets.add( Box.createHorizontalStrut( 10 ) );
        presets.add( m_colorPresets );
        presets.add( Box.createHorizontalGlue() );
        final JPanel radius = new JPanel();
        radius.setBackground( LightZoneSkin.Colors.ToolPanesBackground );
        radius.setLayout( new BoxLayout( radius, BoxLayout.X_AXIS ) );
        radius.add( Box.createHorizontalStrut( 5 ) );
        radius.add( colorRadiusLabel );
        radius.add( m_colorRangeSlider );
        final JPanel presetsAndRadius = new JPanel();
        presetsAndRadius.setBackground( LightZoneSkin.Colors.ToolPanesBackground );
        presetsAndRadius.setLayout(
            new BoxLayout( presetsAndRadius, BoxLayout.Y_AXIS )
        );
        presetsAndRadius.add( Box.createVerticalStrut( 10 ) );
        presetsAndRadius.add( presets );
        presetsAndRadius.add( radius );
        final JPanel colorControls = new JPanel();
        colorControls.setBackground( LightZoneSkin.Colors.ToolPanesBackground );
        colorControls.setLayout(
            new BoxLayout( colorControls, BoxLayout.X_AXIS )
        );
        colorControls.add( m_hueEnabled );
        colorControls.add( m_colorSwatch );
        colorControls.add( presetsAndRadius );
        ////////// Luminosity controls ////////////////////////////////////////
        final JPanel luminosityControls = new JPanel();
        luminosityControls.setBackground( LightZoneSkin.Colors.ToolPanesBackground );
        luminosityControls.setLayout(
            new BoxLayout( luminosityControls, BoxLayout.X_AXIS )
        );
        luminosityControls.add( m_luminosityEnabled );
        luminosityControls.add( m_luminositySelector );
        ////////// Color & Luminosity /////////////////////////////////////////
        final JPanel colorAndLuminosity = new JPanel();
        colorAndLuminosity.setBackground( LightZoneSkin.Colors.ToolPanesBackground );
        colorAndLuminosity.setLayout(
            new BoxLayout( colorAndLuminosity, BoxLayout.Y_AXIS )
        );
        colorAndLuminosity.add( colorControls );
        colorAndLuminosity.add( luminosityControls );
        ////////// Reset & Dropper ////////////////////////////////////////////
        final JPanel resetAndDropper = new JPanel();
        resetAndDropper.setBackground( LightZoneSkin.Colors.ToolPanesBackground );
        resetAndDropper.setLayout(
            new BoxLayout( resetAndDropper, BoxLayout.X_AXIS )
        );
        resetAndDropper.add( m_dropperButton );
        resetAndDropper.add( resetButton );
        ////////// Invert, Reset & Dropper ////////////////////////////////////
        final JPanel invertEtc = new JPanel();
        invertEtc.setBackground( LightZoneSkin.Colors.ToolPanesBackground );
        invertEtc.setLayout( new BoxLayout( invertEtc, BoxLayout.Y_AXIS ) );
        invertEtc.add( Box.createVerticalGlue() );
        invertEtc.add( resetAndDropper );
        invertEtc.add( Box.createVerticalStrut(8) );
        invertEtc.add( m_invertSelection );
        m_invertSelection.setAlignmentX( Component.LEFT_ALIGNMENT );
        resetAndDropper.setAlignmentX( Component.LEFT_ALIGNMENT );
        ////////// This component itself //////////////////////////////////////
        add( invertEtc );
        add( Box.createHorizontalStrut(4) );
        add( colorAndLuminosity );
        add( Box.createHorizontalGlue() );
        m_currentEdit = new ColorSelectionEdit();
    }

    /** Rebinds these controls to a different (presumably replacement) Operation. */
    void operationChanged( Operation op ) {
        m_op = op;
    }

    /**
     * Saves the operation's current color selection under a ColorSelection
     * child node of the given XML node, one attribute per field.
     */
    void save( XmlNode node ) {
        final RGBColorSelection cs = m_op.getColorSelection();
        final XmlNode colorNode = node.addChild( ColorSelectionKey );
        colorNode.setAttribute(
            HueRedKey, Float.toString( cs.red )
        );
        colorNode.setAttribute(
            HueGreenKey, Float.toString( cs.green )
        );
        colorNode.setAttribute(
            HueBlueKey, Float.toString( cs.blue )
        );
        colorNode.setAttribute(
            HueRadiusKey, Float.toString( cs.radius )
        );
        colorNode.setAttribute(
            HueEnabledKey, Boolean.toString( cs.isColorEnabled )
        );
        colorNode.setAttribute(
            LuminosityLowerKey,
            Float.toString( cs.luminosityLower )
        );
        colorNode.setAttribute(
            LuminosityLowerFeatherKey,
            Float.toString( cs.luminosityLowerFeather )
        );
        colorNode.setAttribute(
            LuminosityUpperKey,
            Float.toString( cs.luminosityUpper )
        );
        colorNode.setAttribute(
            LuminosityUpperFeatherKey,
            Float.toString( cs.luminosityUpperFeather )
        );
        colorNode.setAttribute(
            LuminosityEnabledKey,
            Boolean.toString( cs.isLuminosityEnabled )
        );
        colorNode.setAttribute(
            InvertedKey,
            Boolean.toString( cs.isInverted )
        );
    }

    /**
     * Restores a color selection previously written by {@link #save(XmlNode)},
     * pushing it into both the controls and the operation. A missing
     * ColorSelection child node is silently ignored (older documents).
     *
     * @throws XMLException if any attribute fails to parse.
     */
    void restore( XmlNode node ) throws XMLException {
        if ( node.hasChild( ColorSelectionKey ) ) {
            node = node.getChild( ColorSelectionKey );
            try {
                final float red = Float.parseFloat(
                    node.getAttribute( HueRedKey )
                );
                final float green = Float.parseFloat(
                    node.getAttribute( HueGreenKey )
                );
                final float blue = Float.parseFloat(
                    node.getAttribute( HueBlueKey )
                );
                final float radius = Float.parseFloat(
                    node.getAttribute( HueRadiusKey )
                );
                final boolean isHueEnabled = Boolean.parseBoolean(
                    node.getAttribute( HueEnabledKey )
                );
                final float blv = Float.parseFloat(
                    node.getAttribute( LuminosityLowerKey )
                );
                final float blfv = Float.parseFloat(
                    node.getAttribute( LuminosityLowerFeatherKey )
                );
                final float buv = Float.parseFloat(
                    node.getAttribute( LuminosityUpperKey )
                );
                final float bufv = Float.parseFloat(
                    node.getAttribute( LuminosityUpperFeatherKey )
                );
                final boolean isLuminosityEnabled = Boolean.parseBoolean(
                    node.getAttribute( LuminosityEnabledKey )
                );
                final boolean isInverted = Boolean.parseBoolean(
                    node.getAttribute( InvertedKey )
                );
                final RGBColorSelection cs = new RGBColorSelection(
                    red, green, blue, radius, blv, blfv, buv, bufv,
                    isInverted, isHueEnabled, isLuminosityEnabled
                );
                colorSelectionToControls( cs, false );
                m_op.setColorSelection( cs );
                m_currentEdit = new ColorSelectionEdit();
            }
            catch ( IllegalArgumentException e ) {
                throw new XMLException( e );
            }
        }
    }

    ////////// private ////////////////////////////////////////////////////////

    /**
     * An undoable edit capturing the color selection as it was when the edit
     * began (construction time) and when it ended (postEdit time), so undo
     * and redo can swing the controls and the operation between the two.
     */
    private class ColorSelectionEdit extends AbstractUndoableEdit {
        ColorSelectionEdit() {
            m_beforeHueModel = controlsToColorSelection();
        }
        // Called from postEdit(): snapshots the "after" state and names the edit.
        void end( String name ) {
            m_name = name;
            m_afterHueModel = controlsToColorSelection();
        }
        public String getPresentationName() {
            return m_name;
        }
        public void undo() {
            super.undo();
            colorSelectionToControls( m_beforeHueModel, true );
            m_op.setColorSelection( m_beforeHueModel );
            m_currentEdit = new ColorSelectionEdit();
        }
        public void redo() {
            super.redo();
            colorSelectionToControls( m_afterHueModel, true );
            m_op.setColorSelection( m_afterHueModel );
        }
        private RGBColorSelection m_afterHueModel;
        private final RGBColorSelection m_beforeHueModel;
        private String m_name = "";
    }

    // Handle ColorSelection updates and undo for events coming from the
    // enable/disable checkboxes.
    private final class EnablerListener implements ItemListener {
        public void itemStateChanged( ItemEvent ie ) {
            // m_isUpdatingControls guards against re-entrant updates while the
            // controls themselves are being slewed programmatically.
            if ( !m_isUpdatingControls ) {
                m_op.setColorSelection( controlsToColorSelection() );
                final JComponent comp = (JComponent)ie.getSource();
                if ( ie.getStateChange() == ItemEvent.SELECTED ) {
                    postEdit( m_enabledEditName );
                    comp.setToolTipText( m_selectedTip );
                } else {
                    postEdit( m_disabledEditName );
                    comp.setToolTipText( m_unselectedTip );
                }
            }
        }
        EnablerListener( String selectedTip, String unselectedTip ) {
            this(
                selectedTip, unselectedTip,
                LOCALE.get( "ColorSelectorEnabledEditName" ),
                LOCALE.get( "ColorSelectorDisabledEditName" )
            );
        }
        EnablerListener( String selectedTip, String unselectedTip,
                         String enabledEditName, String disabledEditName ) {
            m_enabledEditName = enabledEditName;
            m_disabledEditName = disabledEditName;
            m_selectedTip = selectedTip;
            m_unselectedTip = unselectedTip;
        }
        private final String m_enabledEditName, m_disabledEditName;
        private final String m_selectedTip, m_unselectedTip;
    }

    /**
     * This is listener for both the color feathering radius slider and the
     * luminosity selector.
     */
    private final class LocalListener
        extends MouseAdapter implements ChangeListener
    {
        // Batch engine changes for the duration of a drag; the undoable edit
        // is posted once, on release.
        public void mousePressed( MouseEvent me ) {
            m_op.changeBatchStarted();
        }
        public void mouseReleased( MouseEvent me ) {
            m_op.changeBatchEnded();
            postEdit( LOCALE.get( "ColorSelectorEditName" ) );
        }
        public void stateChanged( ChangeEvent ce ) {
            if ( !m_isUpdatingControls ) {
                final RGBColorSelection cs = controlsToColorSelection();
                // Remember the feather values so dropper-sampled selections
                // can be merged with them later (see mergeColorSelections).
                m_lowerLuminosityFeather = cs.luminosityLowerFeather;
                m_upperLuminosityFeather = cs.luminosityUpperFeather;
                m_op.setColorSelection( cs );
                m_prevCS = cs;
            }
        }
    }

    /**
     * Sets the values of the color selection controls from the given
     * {@link ColorSelection}.
     *
     * @param cs The {@link ColorSelection}.
     * @param force If <code>true</code>, force updating of controls.
     * (This is used during undo/redo.)
     * @see #controlsToColorSelection()
     */
    private void colorSelectionToControls( RGBColorSelection cs,
                                           boolean force ) {
        // Suppress listener callbacks while the controls are slewed.
        m_isUpdatingControls = true;
        try {
            if ( m_hueEnabled.isSelected() && cs.isColorEnabled || force ) {
                m_colorSwatch.setColor( cs.toColor() );
                final int radius;
                if ( cs.getPreset() == RGBColorSelectionPreset.AllColors )
                    radius = COLOR_RADIUS_DEFAULT;
                else
                    radius = (int)(cs.radius * SLIDER_RADIUS_MAX);
                m_colorRangeSlider.setValue( radius );
                m_hueEnabled.setSelected( cs.isColorEnabled );
            }
            if ( m_luminosityEnabled.isSelected() && cs.isLuminosityEnabled ||
                 force ) {
                // Map the selection's normalized [0..1] luminosity values onto
                // the selector's integer thumb range.
                final float cblv = cs.luminosityLower;
                final float cblfv = cs.luminosityLowerFeather;
                final float cbuv = cs.luminosityUpper;
                final float cbufv = cs.luminosityUpperFeather;
                final int bMin = m_luminositySelector.getMinimumThumbValue();
                final int bMax = m_luminositySelector.getMaximumThumbValue();
                final int bWidth = bMax - bMin;
                m_luminositySelector.setProperties(
                    bMin, bMax,
                    (int)(cblv * bWidth), (int)((cblv - cblfv) * bWidth),
                    (int)(cbuv * bWidth), (int)((cbuv + cbufv) * bWidth),
                    m_luminositySelector.getMinimumTrackValue(),
                    m_luminositySelector.getMaximumTrackValue(),
                    m_luminositySelector.getTrackValue(),
                    m_luminositySelector.getTrackValueWraps()
                );
                m_luminosityEnabled.setSelected( cs.isLuminosityEnabled );
            }
            m_invertSelection.setSelected( cs.isInverted );
            // Sync the preset buttons without triggering selectPreset().
            m_skipSelectPresetCode = true;
            m_colorPresets.setSelectedItem( cs.getPreset() );
            m_skipSelectPresetCode = false;
            if (cs.isAllSelected())
                pcs.firePropertyChange(
                    COLOR_SELECTION, Boolean.TRUE, Boolean.FALSE
                );
            else
                pcs.firePropertyChange(
                    COLOR_SELECTION, Boolean.FALSE, Boolean.TRUE
                );
        }
        finally {
            m_isUpdatingControls = false;
        }
    }

    /**
     * Converts the current values of the color selection controls into a
     * {@link ColorSelection} that is used by the imaging engine.
     *
     * @return Returns a {@link ColorSelection} that represents the current
     * values of the color selection controls.
     * @see #colorSelectionToControls(RGBColorSelection,boolean)
     */
    private RGBColorSelection controlsToColorSelection() {
        final Color c = m_colorSwatch.getColor();
        final int hMin = m_colorRangeSlider.getMinimum();
        final int hMax = m_colorRangeSlider.getMaximum();
        final float hWidth = hMax - hMin;
        // AllColors is encoded with a radius of -1.
        final float radius = m_colorPresets.getSelectedItem() == RGBColorSelectionPreset.AllColors ? -1 : m_colorRangeSlider.getValue() / hWidth;
        final int lMin = m_luminositySelector.getMinimumThumbValue();
        final int lMax = m_luminositySelector.getMaximumThumbValue();
        final float lWidth = lMax - lMin;
        final int llv = m_luminositySelector.getLowerThumbValue();
        final int llfv = m_luminositySelector.getLowerThumbFeatheringValue();
        final int luv = m_luminositySelector.getUpperThumbValue();
        final int lufv = m_luminositySelector.getUpperThumbFeatheringValue();
        // Normalize thumb positions to [0..1]; feather values are expressed
        // as offsets from the corresponding thumb.
        final float cllv = llv / lWidth;
        final float cllfv = (llv - llfv) / lWidth;
        final float cluv = luv / lWidth;
        final float clufv = (lufv - luv) / lWidth;
        final RGBColorSelection cs = new RGBColorSelection(
            c.getRed() / 255F, c.getGreen() / 255F, c.getBlue() / 255F,
            radius,
            cllv, cllfv, cluv, clufv,
            m_invertSelection.isSelected(),
            m_hueEnabled.isSelected(), m_luminosityEnabled.isSelected()
        );
        if ( cs.isAllSelected() )
            pcs.firePropertyChange(
                COLOR_SELECTION, Boolean.TRUE, Boolean.FALSE
            );
        else
            pcs.firePropertyChange(
                COLOR_SELECTION, Boolean.FALSE, Boolean.TRUE
            );
        return cs;
    }

    /** Returns the ComboFrame ancestor of this component, or null if none. */
    public ComboFrame getComboFrame() {
        return (ComboFrame)SwingUtilities.getAncestorOfClass(
            ComboFrame.class, this
        );
    }

    /**
     * Creates the color-dropper toggle button and its associated DropperMode,
     * wiring mode enter/exit notifications and point-selection callbacks.
     */
    private void initDropper( final OpControl control ) {
        m_dropperButton = new DropperButton();
        m_dropperButton.setToolTips(
            ColorSelectStartToolTip, ColorSelectEndToolTip
        );
        m_dropperButton.addItemListener(
            new ItemListener() {
                public void itemStateChanged( ItemEvent ie ) {
                    if ( ie.getStateChange() == ItemEvent.SELECTED ) {
                        getComboFrame().getEditor().setMode( EditorMode.ARROW );
                        control.notifyListenersEnterMode( m_dropperMode );
                    } else if ( !m_isDropperModeCancelling )
                        control.notifyListenersExitMode( m_dropperMode );
                }
            }
        );
        m_dropperMode = new DropperMode( control );
        m_dropperMode.addListener(
            new DropperMode.Listener() {
                public void pointSelected( Point2D p ) {
                    selectColorAt( p );
                }
                public void modeCancelled() {
                    // Reset the toggle button, without firing notifications:
                    m_isDropperModeCancelling = true;
                    m_dropperButton.setSelected( false );
                    m_isDropperModeCancelling = false;
                }
            }
        );
    }

    /**
     * Merges a freshly sampled color selection (e.g. from the dropper) with
     * the previously active selection, preserving the prior radius and
     * clamping the feather values so they never extend past the luminosity
     * bounds. AllColors selections pass through unchanged.
     */
    private RGBColorSelection mergeColorSelections( RGBColorSelection cs ) {
        if ( cs.getPreset() != RGBColorSelectionPreset.AllColors ) {
            final float radius;
            final float luminosityLowerFeather;
            final float luminosityUpperFeather;
            if ( m_prevCS != null ) {
                radius = m_prevCS.radius;
                luminosityLowerFeather =
                    Math.min( cs.luminosityLower, m_lowerLuminosityFeather );
                luminosityUpperFeather =
                    Math.min( 1-cs.luminosityUpper, m_upperLuminosityFeather );
            } else {
                radius = cs.radius;
                luminosityLowerFeather =
                    Math.min( cs.luminosityLower, cs.luminosityLowerFeather );
                luminosityUpperFeather =
                    Math.min( 1-cs.luminosityUpper, cs.luminosityUpperFeather );
            }
            cs = new RGBColorSelection(
                cs.red, cs.green, cs.blue, radius,
                cs.luminosityLower, luminosityLowerFeather,
                cs.luminosityUpper, luminosityUpperFeather,
                false,
                m_hueEnabled.isSelected(),
                m_luminosityEnabled.isSelected()
            );
            m_prevCS = cs;
            m_lowerLuminosityFeather = cs.luminosityLowerFeather;
            m_upperLuminosityFeather = cs.luminosityUpperFeather;
        }
        return cs;
    }

    /**
     * Finishes the current undoable edit under the given name, posts it to
     * the undo support, and starts a fresh edit.
     */
    private void postEdit( String name ) {
        m_currentEdit.end( name );
        m_undoSupport.postEdit( m_currentEdit );
        m_currentEdit = new ColorSelectionEdit();
    }

    /**
     * Resets the hue selection to AllColors and opens the luminosity range
     * fully (for whichever of the two is enabled), posting a single undoable
     * edit if anything was actually reset.
     */
    private void resetColorSelection() {
        m_isUpdatingControls = true;
        m_op.changeBatchStarted();
        boolean undoable = false;
        try {
            if ( m_hueEnabled.isSelected() ) {
                m_prevCS = null;
                m_colorPresets.setSelectedItem( RGBColorSelectionPreset.AllColors );
                undoable = true;
            }
            if ( m_luminosityEnabled.isSelected() ) {
                // Thumbs to the extremes, feathers collapsed onto the thumbs.
                m_luminositySelector.setProperties(
                    m_luminositySelector.getMinimumThumbValue(),
                    m_luminositySelector.getMaximumThumbValue(),
                    m_luminositySelector.getMinimumThumbValue(),
                    m_luminositySelector.getMinimumThumbValue(),
                    m_luminositySelector.getMaximumThumbValue(),
                    m_luminositySelector.getMaximumThumbValue(),
                    m_luminositySelector.getMinimumTrackValue(),
                    m_luminositySelector.getMaximumTrackValue(),
                    m_luminositySelector.getTrackValue(),
                    m_luminositySelector.getTrackValueWraps()
                );
                undoable = true;
            }
            if ( undoable )
                postEdit( LOCALE.get( "ResetColorSelectionEditName" ) );
        }
        finally {
            m_op.changeBatchEnded();
            m_isUpdatingControls = false;
        }
    }

    /**
     * Dropper callback: samples the color selection at the given image point,
     * merges it with the previous selection, applies it and posts an edit.
     */
    private void selectColorAt( Point2D p ) {
        RGBColorSelection cs = m_op.getColorSelectionAt( p );
        cs = mergeColorSelections( cs );
        m_op.setColorSelection( cs );
        colorSelectionToControls( cs, false );
        postEdit( LOCALE.get( "ColorDropperEditName" ) );
    }

    /**
     * Applies the given preset as the current color selection (invoked from
     * the preset buttons' item listeners). Guarded by m_skipSelectPresetCode
     * so programmatic button syncing does not recurse back in here.
     */
    private void selectPreset( RGBColorSelectionPreset p ) {
        if ( !m_skipSelectPresetCode ) {
            final boolean wasUpdatingControls = m_isUpdatingControls;
            m_skipSelectPresetCode = true;
            m_isUpdatingControls = true;
            m_hueEnabled.setSelected( true );
            RGBColorSelection cs =
                new RGBColorSelection( p, m_invertSelection.isSelected() );
            //cs = mergeColorSelections( cs );
            m_op.setColorSelection( cs );
            colorSelectionToControls( cs, false );
            m_skipSelectPresetCode = false;
            if ( !wasUpdatingControls )
                postEdit(
                    LOCALE.get( "SelectColorPresetEditName", p.toString() )
                );
            m_prevCS = null;
            m_isUpdatingControls = wasUpdatingControls;
        }
    }

    // Re-entrancy guard: true while preset buttons are synced programmatically.
    private boolean m_skipSelectPresetCode;
    private final ColorSwatch m_colorSwatch;
    private final JSlider m_colorRangeSlider;
    private final ColorPresets m_colorPresets;
    // The in-progress undoable edit; replaced each time postEdit() fires.
    private ColorSelectionEdit m_currentEdit;
    private DropperButton m_dropperButton;
    private DropperMode m_dropperMode;
    private final JCheckBox m_invertSelection;
    private Operation m_op;
    private final RangeSelector m_luminositySelector;
    // Last-applied feather values, reused when merging dropper selections.
    private float m_lowerLuminosityFeather, m_upperLuminosityFeather;
    // Previously applied selection, or null after a reset/preset change.
    private RGBColorSelection m_prevCS;
    private JCheckBox m_hueEnabled;
    private JCheckBox m_luminosityEnabled;
    // Flag dropper button state changes that just synchronize the button
    // when the dropper Mode is externally cancelled, so OpControlModeListener
    // notifications won't fire:
    private boolean m_isDropperModeCancelling;
    // Disable callbacks from the selectors and the checkboxes when these
    // controls are being slewed to a new ColorSelection.
    private boolean m_isUpdatingControls;
    private OpControl.OpControlUndoSupport m_undoSupport;
    private final static String ColorSelectStartToolTip =
        LOCALE.get( "ColorSelectStartToolTip" );
    private final static String ColorSelectEndToolTip =
        LOCALE.get( "ColorSelectEndToolTip" );
    // XML attribute/element names used by save()/restore().
    private final static String ColorSelectionKey = "ColorSelection";
    private final static String HueRedKey = "HueRed";
    private final static String HueGreenKey = "HueGreen";
    private final static String HueBlueKey = "HueBlue";
    private final static String HueRadiusKey = "HueRadius";
    private final static String HueEnabledKey = "HueEnabled";
    private final static String LuminosityLowerKey = "LuminosityLower";
    private final static String LuminosityUpperKey = "LuminosityUpper";
    private final static String LuminosityLowerFeatherKey = "LuminosityLowerFeather";
    private final static String LuminosityUpperFeatherKey = "LuminosityUpperFeather";
    private final static String LuminosityEnabledKey = "LuminosityEnabled";
    private final static String InvertedKey = "Inverted";
    private static final int SLIDER_RADIUS_MIN = 0;
    private static final int SLIDER_RADIUS_MAX = 100;
    private static final int SELECTOR_LUMINOSITY_MIN = 0;
    private static final int SELECTOR_LUMINOSITY_MAX = 1000;
    private static final Color COLOR_SWATCH_DEFAULT = Color.GRAY;
    private static final int COLOR_RADIUS_DEFAULT = 20;
}
/* vim:set et sw=4 ts=4: */
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysml.runtime.io;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.sysml.conf.ConfigurationManager;
import org.apache.sysml.runtime.DMLRuntimeException;
import org.apache.sysml.runtime.matrix.data.DenseBlock;
import org.apache.sysml.runtime.matrix.data.InputInfo;
import org.apache.sysml.runtime.matrix.data.MatrixBlock;
import org.apache.sysml.runtime.util.FastStringTokenizer;
/**
 * Single-threaded reader for matrices in textual IJV ("text cell") format,
 * including the closely related MatrixMarket format, which prefixes a "%%"
 * header, optional '%' comment lines, and a dimension line.  Reads from a
 * file or directory on HDFS, or from an arbitrary input stream, into a
 * {@link MatrixBlock}.
 */
public class ReaderTextCell extends MatrixReader
{
	//true if a MatrixMarket header must be consumed before the cell data
	private boolean _isMMFile = false;

	public ReaderTextCell(InputInfo info)
	{
		_isMMFile = (info == InputInfo.MatrixMarketInputInfo);
	}

	/**
	 * Reads a matrix from HDFS.  Directories are read split-wise via the
	 * Hadoop text input format; single files are read as a raw stream.
	 *
	 * @param fname  HDFS path of the input file or directory
	 * @param rlen   expected number of rows
	 * @param clen   expected number of columns
	 * @param brlen  block size in rows (unused for the flat output block)
	 * @param bclen  block size in columns (unused for the flat output block)
	 * @param estnnz estimated number of non-zeros (drives sparse/dense choice)
	 * @return the populated matrix block
	 */
	@Override
	public MatrixBlock readMatrixFromHDFS(String fname, long rlen, long clen, int brlen, int bclen, long estnnz)
		throws IOException, DMLRuntimeException
	{
		//allocate output matrix block
		MatrixBlock ret = createOutputMatrixBlock(rlen, clen, (int)rlen, (int)clen, estnnz, true, false);

		//prepare file access
		JobConf job = new JobConf(ConfigurationManager.getCachedJobConf());
		Path path = new Path( fname );
		FileSystem fs = IOUtilFunctions.getFileSystem(path, job);

		//check existence and non-empty file
		checkValidInputFile(fs, path);

		//core read
		if( fs.isDirectory(path) )
			readTextCellMatrixFromHDFS(path, job, ret, rlen, clen, brlen, bclen);
		else
			readRawTextCellMatrixFromHDFS(path, job, fs, ret, rlen, clen, brlen, bclen, _isMMFile);

		//finally check if change of sparse/dense block representation required
		if( !ret.isInSparseFormat() )
			ret.recomputeNonZeros();
		ret.examSparsity();

		return ret;
	}

	/**
	 * Reads a matrix from an arbitrary input stream (raw text cell or
	 * MatrixMarket, depending on the InputInfo passed at construction).
	 */
	@Override
	public MatrixBlock readMatrixFromInputStream(InputStream is, long rlen, long clen, int brlen, int bclen, long estnnz)
		throws IOException, DMLRuntimeException
	{
		//allocate output matrix block
		MatrixBlock ret = createOutputMatrixBlock(rlen, clen, brlen, bclen, estnnz, true, false);

		//core read
		readRawTextCellMatrixFromInputStream(is, ret, rlen, clen, brlen, bclen, _isMMFile);

		//finally check if change of sparse/dense block representation required
		if( !ret.isInSparseFormat() )
			ret.recomputeNonZeros();
		ret.examSparsity();

		return ret;
	}

	/**
	 * Reads text-cell data split-wise from a directory of part files.
	 * Cells with 0-based index -1 (i.e., 1-based index 0) are skipped.
	 */
	private static void readTextCellMatrixFromHDFS( Path path, JobConf job, MatrixBlock dest, long rlen, long clen, int brlen, int bclen )
		throws IOException
	{
		boolean sparse = dest.isInSparseFormat();
		FileInputFormat.addInputPath(job, path);
		TextInputFormat informat = new TextInputFormat();
		informat.configure(job);
		InputSplit[] splits = informat.getSplits(job, 1);

		LongWritable key = new LongWritable();
		Text value = new Text();
		//row/col retained outside the loop for post-mortem bounds reporting
		int row = -1;
		int col = -1;

		try
		{
			FastStringTokenizer st = new FastStringTokenizer(' ');

			for(InputSplit split: splits)
			{
				RecordReader<LongWritable,Text> reader = informat.getRecordReader(split, job, Reporter.NULL);
				try
				{
					if( sparse ) //SPARSE<-value
					{
						while( reader.next(key, value) ) {
							st.reset( value.toString() ); //reinit tokenizer
							row = st.nextInt() - 1;
							col = st.nextInt() - 1;
							if(row == -1 || col == -1) continue;
							double lvalue = st.nextDouble();
							dest.appendValue(row, col, lvalue);
						}
						dest.sortSparseRows();
					}
					else //DENSE<-value
					{
						DenseBlock a = dest.getDenseBlock();
						while( reader.next(key, value) ) {
							st.reset( value.toString() ); //reinit tokenizer
							row = st.nextInt()-1;
							col = st.nextInt()-1;
							if(row == -1 || col == -1) continue;
							double lvalue = st.nextDouble();
							a.set( row, col, lvalue );
						}
					}
				}
				finally {
					IOUtilFunctions.closeSilently(reader);
				}
			}
		}
		catch(Exception ex) {
			//post-mortem error handling and bounds checking
			//(pass ex as cause in both branches, consistent w/ raw text cell read)
			if( row < 0 || row + 1 > rlen || col < 0 || col + 1 > clen )
				throw new IOException("Matrix cell ["+(row+1)+","+(col+1)+"] " +
					"out of overall matrix range [1:"+rlen+",1:"+clen+"].", ex);
			else
				throw new IOException( "Unable to read matrix in text cell format.", ex );
		}
	}

	/**
	 * Reads a single raw text-cell / MatrixMarket file from HDFS by
	 * delegating to the stream-based reader (which closes the stream).
	 */
	private static void readRawTextCellMatrixFromHDFS( Path path, JobConf job, FileSystem fs, MatrixBlock dest, long rlen, long clen, int brlen, int bclen, boolean matrixMarket )
		throws IOException
	{
		//create input stream for path
		InputStream inputStream = fs.open(path);

		//actual read
		readRawTextCellMatrixFromInputStream(inputStream, dest, rlen, clen, brlen, bclen, matrixMarket);
	}

	/**
	 * Core stream-based read of raw text-cell data.  If matrixMarket is set,
	 * validates the "%%" header, skips '%' comments, and checks the declared
	 * dimensions against the expected rlen/clen before reading cells.
	 */
	private static void readRawTextCellMatrixFromInputStream( InputStream is, MatrixBlock dest, long rlen, long clen, int brlen, int bclen, boolean matrixMarket )
		throws IOException
	{
		BufferedReader br = new BufferedReader(new InputStreamReader( is ));

		boolean sparse = dest.isInSparseFormat();
		String value = null;
		//row/col retained outside the loop for post-mortem bounds reporting
		int row = -1;
		int col = -1;

		// Read the header lines, if reading from a matrixMarket file
		if ( matrixMarket ) {
			value = br.readLine(); // header line
			if ( value==null || !value.startsWith("%%") ) {
				throw new IOException("Error while reading file in MatrixMarket format. Expecting a header line, but encountered, \"" + value +"\".");
			}

			// skip until end-of-comments
			while( (value = br.readLine())!=null && value.charAt(0) == '%' ) {
				//do nothing just skip comments
			}
			//guard against truncated files that end inside the comment block
			if ( value == null ) {
				throw new IOException("Error while reading file in MatrixMarket format. Expecting a line with matrix dimensions, but reached end of file.");
			}

			// the first line after comments is the one w/ matrix dimensions
			// validate (rlen clen nnz)
			String[] fields = value.trim().split("\\s+");
			long mm_rlen = Long.parseLong(fields[0]);
			long mm_clen = Long.parseLong(fields[1]);
			if ( rlen != mm_rlen || clen != mm_clen ) {
				throw new IOException("Unexpected matrix dimensions while reading file in MatrixMarket format. Expecting dimensions [" + rlen + " rows, " + clen + " cols] but encountered [" + mm_rlen + " rows, " + mm_clen + "cols].");
			}
		}

		try
		{
			FastStringTokenizer st = new FastStringTokenizer(' ');

			if( sparse ) //SPARSE<-value
			{
				while( (value=br.readLine())!=null )
				{
					st.reset( value ); //reinit tokenizer
					row = st.nextInt()-1;
					col = st.nextInt()-1;
					if(row == -1 || col == -1) continue;
					double lvalue = st.nextDouble();
					dest.appendValue(row, col, lvalue);
				}
				dest.sortSparseRows();
			}
			else //DENSE<-value
			{
				DenseBlock a = dest.getDenseBlock();
				while( (value=br.readLine())!=null ) {
					st.reset( value ); //reinit tokenizer
					row = st.nextInt()-1;
					col = st.nextInt()-1;
					if(row == -1 || col == -1) continue;
					double lvalue = st.nextDouble();
					a.set( row, col, lvalue );
				}
			}
		}
		catch(Exception ex) {
			//post-mortem error handling and bounds checking
			if( row < 0 || row + 1 > rlen || col < 0 || col + 1 > clen )
				throw new IOException("Matrix cell ["+(row+1)+","+(col+1)+"] " +
					"out of overall matrix range [1:"+rlen+",1:"+clen+"].", ex);
			else
				throw new IOException( "Unable to read matrix in raw text cell format.", ex );
		}
		finally {
			IOUtilFunctions.closeSilently(br);
		}
	}
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.connect.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/connect-2017-08-08/ListLexBots" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListLexBotsResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /**
     * <p>
     * The names and Regions of the Amazon Lex bots associated with the specified instance.
     * </p>
     */
    private java.util.List<LexBot> lexBots;

    /**
     * <p>
     * If there are additional results, this is the token for the next set of results.
     * </p>
     */
    private String nextToken;

    /**
     * <p>
     * The names and Regions of the Amazon Lex bots associated with the specified instance.
     * </p>
     *
     * @return The names and Regions of the Amazon Lex bots associated with the specified instance.
     */
    public java.util.List<LexBot> getLexBots() {
        return lexBots;
    }

    /**
     * <p>
     * The names and Regions of the Amazon Lex bots associated with the specified instance.
     * </p>
     * A defensive copy of the given collection is stored; null clears the list.
     *
     * @param lexBots
     *        The names and Regions of the Amazon Lex bots associated with the specified instance.
     */
    public void setLexBots(java.util.Collection<LexBot> lexBots) {
        this.lexBots = (lexBots == null) ? null : new java.util.ArrayList<LexBot>(lexBots);
    }

    /**
     * <p>
     * The names and Regions of the Amazon Lex bots associated with the specified instance.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setLexBots(java.util.Collection)} or {@link #withLexBots(java.util.Collection)} if you want to override
     * the existing values.
     * </p>
     *
     * @param lexBots
     *        The names and Regions of the Amazon Lex bots associated with the specified instance.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListLexBotsResult withLexBots(LexBot... lexBots) {
        if (this.lexBots == null) {
            this.lexBots = new java.util.ArrayList<LexBot>(lexBots.length);
        }
        java.util.Collections.addAll(this.lexBots, lexBots);
        return this;
    }

    /**
     * <p>
     * The names and Regions of the Amazon Lex bots associated with the specified instance.
     * </p>
     *
     * @param lexBots
     *        The names and Regions of the Amazon Lex bots associated with the specified instance.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListLexBotsResult withLexBots(java.util.Collection<LexBot> lexBots) {
        setLexBots(lexBots);
        return this;
    }

    /**
     * <p>
     * If there are additional results, this is the token for the next set of results.
     * </p>
     *
     * @param nextToken
     *        If there are additional results, this is the token for the next set of results.
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * <p>
     * If there are additional results, this is the token for the next set of results.
     * </p>
     *
     * @return If there are additional results, this is the token for the next set of results.
     */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * <p>
     * If there are additional results, this is the token for the next set of results.
     * </p>
     *
     * @param nextToken
     *        If there are additional results, this is the token for the next set of results.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListLexBotsResult withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        final StringBuilder sb = new StringBuilder("{");
        if (getLexBots() != null)
            sb.append("LexBots: ").append(getLexBots()).append(",");
        if (getNextToken() != null)
            sb.append("NextToken: ").append(getNextToken());
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (!(obj instanceof ListLexBotsResult))
            return false;
        final ListLexBotsResult other = (ListLexBotsResult) obj;
        return java.util.Objects.equals(getLexBots(), other.getLexBots())
                && java.util.Objects.equals(getNextToken(), other.getNextToken());
    }

    @Override
    public int hashCode() {
        // Objects.hash computes the same 31-based chain as the generated code.
        return java.util.Objects.hash(getLexBots(), getNextToken());
    }

    @Override
    public ListLexBotsResult clone() {
        try {
            return (ListLexBotsResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

}
| |
/*
* Copyright 2009 Andrey Khalzov, and individual contributors as indicated by the @author tag.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and limitations under the License.
*/
/**
* Created by Andrey Khalzov
* 18.12.2008 20:35:32
*/
package org.wannatrak.client.layout;
import org.wannatrak.client.MainWidget;
import com.google.gwt.user.client.ui.Label;
import com.google.gwt.user.client.ui.Widget;
import com.google.gwt.user.client.ui.PushButton;
/**
 * Base layout that positions the main widget's labels, panels and show/hide
 * buttons in a three-row table.  Cell coordinates come from overridable
 * getXxxRow()/getXxxColumn() hooks so subclasses can rearrange the grid.
 */
public abstract class AbstractMainWidgetLayout implements MainWidgetLayout {

    public static final int LABEL_ROW = 0;
    public static final int WIDGET_ROW = 1;
    public static final int BOTTOM_ROW = 2;

    /** Places the given widget into the main table at the given cell. */
    private static void place(MainWidget mainWidget, int row, int column, Widget widget) {
        mainWidget.setWidget(row, column, widget);
    }

    /** Applies the given CSS style name to the given table cell. */
    private static void styleCell(MainWidget mainWidget, int row, int column, String cellStyleName) {
        mainWidget.getCellFormatter().setStyleName(row, column, cellStyleName);
    }

    public void setLeftWidgetLabel(MainWidget mainWidget, Label leftWidgetLabel, String cellStyleName) {
        setLeftWidgetLabel(mainWidget, leftWidgetLabel);
        styleCell(mainWidget, getLeftWidgetLabelRow(), getLeftWidgetLabelColumn(), cellStyleName);
    }

    public void setLeftWidgetLabel(MainWidget mainWidget, Label leftWidgetLabel) {
        place(mainWidget, getLeftWidgetLabelRow(), getLeftWidgetLabelColumn(), leftWidgetLabel);
    }

    public void setRightWidgetLabel(MainWidget mainWidget, Label rightWidgetLabel, String cellStyleName) {
        setRightWidgetLabel(mainWidget, rightWidgetLabel);
        styleCell(mainWidget, getRightWidgetLabelRow(), getRightWidgetLabelColumn(), cellStyleName);
    }

    public void setRightWidgetLabel(MainWidget mainWidget, Label rightWidgetLabel) {
        place(mainWidget, getRightWidgetLabelRow(), getRightWidgetLabelColumn(), rightWidgetLabel);
    }

    public void setCenterWidgetHeader(MainWidget mainWidget, Widget centerWidgetHeader, String cellStyleName) {
        setCenterWidgetHeader(mainWidget, centerWidgetHeader);
        styleCell(mainWidget, getCenterWidgetHeaderRow(), getCenterWidgetHeaderColumn(), cellStyleName);
    }

    public void setCenterWidgetHeader(MainWidget mainWidget, Widget centerWidgetHeader) {
        place(mainWidget, getCenterWidgetHeaderRow(), getCenterWidgetHeaderColumn(), centerWidgetHeader);
    }

    public void setLeftWidget(MainWidget mainWidget, Widget leftWidget, String cellStyleName) {
        setLeftWidget(mainWidget, leftWidget);
        styleCell(mainWidget, getLeftWidgetRow(), getLeftWidgetColumn(), cellStyleName);
    }

    public void setLeftWidget(MainWidget mainWidget, Widget leftWidget) {
        place(mainWidget, getLeftWidgetRow(), getLeftWidgetColumn(), leftWidget);
    }

    public void setCenterWidget(MainWidget mainWidget, Widget centerWidget, String cellStyleName) {
        setCenterWidget(mainWidget, centerWidget);
        styleCell(mainWidget, getCenterWidgetRow(), getCenterWidgetColumn(), cellStyleName);
    }

    public void setCenterWidget(MainWidget mainWidget, Widget centerWidget) {
        place(mainWidget, getCenterWidgetRow(), getCenterWidgetColumn(), centerWidget);
    }

    public void setRightWidget(MainWidget mainWidget, Widget rightWidget, String cellStyleName) {
        setRightWidget(mainWidget, rightWidget);
        styleCell(mainWidget, getRightWidgetRow(), getRightWidgetColumn(), cellStyleName);
    }

    public void setRightWidget(MainWidget mainWidget, Widget rightWidget) {
        place(mainWidget, getRightWidgetRow(), getRightWidgetColumn(), rightWidget);
    }

    public void setBottomWidget(MainWidget mainWidget, Widget bottomWidget, String cellStyleName) {
        setBottomWidget(mainWidget, bottomWidget);
        styleCell(mainWidget, getBottomWidgetRow(), getBottomWidgetColumn(), cellStyleName);
    }

    public void setBottomWidget(MainWidget mainWidget, Widget bottomWidget) {
        place(mainWidget, getBottomWidgetRow(), getBottomWidgetColumn(), bottomWidget);
    }

    public void setLeftHideButton(MainWidget mainWidget, PushButton leftHideButton, String cellStyleName) {
        setLeftHideButton(mainWidget, leftHideButton);
        styleCell(mainWidget, getLeftHideButtonRow(), getLeftHideButtonColumn(), cellStyleName);
    }

    public void setLeftHideButton(MainWidget mainWidget, PushButton leftHideButton) {
        place(mainWidget, getLeftHideButtonRow(), getLeftHideButtonColumn(), leftHideButton);
    }

    public void setRightHideButton(MainWidget mainWidget, PushButton rightHideButton, String cellStyleName) {
        setRightHideButton(mainWidget, rightHideButton);
        styleCell(mainWidget, getRightHideButtonRow(), getRightHideButtonColumn(), cellStyleName);
    }

    public void setRightHideButton(MainWidget mainWidget, PushButton rightHideButton) {
        place(mainWidget, getRightHideButtonRow(), getRightHideButtonColumn(), rightHideButton);
    }

    public void setLeftShowButton(MainWidget mainWidget, PushButton leftShowButton, String cellStyleName) {
        setLeftShowButton(mainWidget, leftShowButton);
        styleCell(mainWidget, getLeftShowButtonRow(), getLeftShowButtonColumn(), cellStyleName);
    }

    public void setLeftShowButton(MainWidget mainWidget, PushButton leftShowButton) {
        place(mainWidget, getLeftShowButtonRow(), getLeftShowButtonColumn(), leftShowButton);
    }

    public void setRightShowButton(MainWidget mainWidget, PushButton rightShowButton, String cellStyleName) {
        setRightShowButton(mainWidget, rightShowButton);
        styleCell(mainWidget, getRightShowButtonRow(), getRightShowButtonColumn(), cellStyleName);
    }

    public void setRightShowButton(MainWidget mainWidget, PushButton rightShowButton) {
        place(mainWidget, getRightShowButtonRow(), getRightShowButtonColumn(), rightShowButton);
    }

    // --- grid coordinate hooks (overridable by subclasses) -----------------

    protected int getLeftWidgetLabelRow() {
        return LABEL_ROW;
    }

    protected int getLeftWidgetLabelColumn() {
        return getLeftWidgetColumn();
    }

    protected int getLeftWidgetRow() {
        return WIDGET_ROW;
    }

    protected int getLeftWidgetColumn() {
        return 0;
    }

    protected int getCenterWidgetHeaderRow() {
        return LABEL_ROW;
    }

    protected int getCenterWidgetHeaderColumn() {
        return getCenterWidgetColumn();
    }

    protected int getCenterWidgetRow() {
        return WIDGET_ROW;
    }

    protected int getCenterWidgetColumn() {
        return getLeftWidgetColumn() + 2;
    }

    protected int getRightWidgetLabelRow() {
        return LABEL_ROW;
    }

    protected int getRightWidgetLabelColumn() {
        return getRightWidgetColumn();
    }

    protected int getRightWidgetRow() {
        return WIDGET_ROW;
    }

    protected int getRightWidgetColumn() {
        return getCenterWidgetColumn() + 2;
    }

    protected int getBottomWidgetRow() {
        return BOTTOM_ROW;
    }

    protected int getBottomWidgetColumn() {
        return getCenterWidgetColumn();
    }

    protected int getLeftHideButtonRow() {
        return LABEL_ROW;
    }

    protected int getLeftHideButtonColumn() {
        return getLeftWidgetColumn() + 1;
    }

    protected int getLeftShowButtonRow() {
        return LABEL_ROW;
    }

    protected int getLeftShowButtonColumn() {
        return getLeftWidgetColumn();
    }

    protected int getRightHideButtonRow() {
        return LABEL_ROW;
    }

    protected int getRightHideButtonColumn() {
        return getRightWidgetColumn() - 1;
    }

    protected int getRightShowButtonRow() {
        return LABEL_ROW;
    }

    protected int getRightShowButtonColumn() {
        return getRightHideButtonColumn();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.cql3.validation.entities;
import org.junit.Test;
import junit.framework.Assert;
import org.apache.cassandra.cql3.CQLTester;
import static junit.framework.Assert.assertNull;
import static org.junit.Assert.assertTrue;
public class TimestampTest extends CQLTester
{
    /**
     * Asserts the shape of a two-row (k, c, writetime, ttl) result set:
     * the writetime column (index 2) is numeric for every row; the ttl
     * column (index 3) is null for partition key 1 (no TTL set) and
     * numeric for the other partition.
     */
    private static void assertWritetimeAndTtl(Object[][] res)
    {
        Assert.assertEquals(2, res.length);
        for (Object[] r : res)
        {
            assertTrue(r[2] instanceof Integer || r[2] instanceof Long);
            if (r[0].equals(1))
                assertNull(r[3]);
            else
                // fixed copy-paste bug: original checked r[2] here although
                // the value under test is r[3]
                assertTrue(r[3] instanceof Integer || r[3] instanceof Long);
        }
    }

    @Test
    public void testNegativeTimestamps() throws Throwable
    {
        createTable("CREATE TABLE %s (k int PRIMARY KEY, v int)");
        execute("INSERT INTO %s (k, v) VALUES (?, ?) USING TIMESTAMP ?", 1, 1, -42L);
        assertRows(execute("SELECT writetime(v) FROM %s WHERE k = ?", 1),
            row(-42L)
        );

        // Long.MIN_VALUE is reserved and must be rejected
        assertInvalid("INSERT INTO %s (k, v) VALUES (?, ?) USING TIMESTAMP ?", 2, 2, Long.MIN_VALUE);
    }

    /**
     * Test timestmp and ttl
     * migrated from cql_tests.py:TestCQL.timestamp_and_ttl_test()
     */
    @Test
    public void testTimestampTTL() throws Throwable
    {
        createTable("CREATE TABLE %s (k int PRIMARY KEY, c text, d text)");

        execute("INSERT INTO %s (k, c) VALUES (1, 'test')");
        execute("INSERT INTO %s (k, c) VALUES (2, 'test') USING TTL 400");

        Object[][] res = getRows(execute("SELECT k, c, writetime(c), ttl(c) FROM %s"));
        assertWritetimeAndTtl(res);

        // wrap writetime(), ttl() in other functions (test for CASSANDRA-8451)
        res = getRows(execute("SELECT k, c, blobAsBigint(bigintAsBlob(writetime(c))), ttl(c) FROM %s"));
        assertWritetimeAndTtl(res);

        res = getRows(execute("SELECT k, c, writetime(c), blobAsInt(intAsBlob(ttl(c))) FROM %s"));
        assertWritetimeAndTtl(res);

        // writetime() of a primary key column is invalid
        assertInvalid("SELECT k, c, writetime(k) FROM %s");

        assertRows(execute("SELECT k, d, writetime(d) FROM %s WHERE k = 1"),
                   row(1, null, null));
    }

    /**
     * Migrated from cql_tests.py:TestCQL.invalid_custom_timestamp_test()
     */
    @Test
    public void testInvalidCustomTimestamp() throws Throwable
    {
        // Conditional updates
        createTable("CREATE TABLE %s (k int, v int, PRIMARY KEY (k, v))");

        execute("BEGIN BATCH " +
                "INSERT INTO %1$s (k, v) VALUES(0, 0) IF NOT EXISTS; " +
                "INSERT INTO %1$s (k, v) VALUES(0, 1) IF NOT EXISTS; " +
                "APPLY BATCH");

        // custom timestamps are not allowed for conditional statements
        assertInvalid("BEGIN BATCH " +
                      "INSERT INTO %1$s (k, v) VALUES(0, 2) IF NOT EXISTS USING TIMESTAMP 1; " +
                      "INSERT INTO %1$s (k, v) VALUES(0, 3) IF NOT EXISTS; " +
                      "APPLY BATCH");
        assertInvalid("BEGIN BATCH " +
                      "USING TIMESTAMP 1 INSERT INTO %1$s (k, v) VALUES(0, 4) IF NOT EXISTS; " +
                      "INSERT INTO %1$s (k, v) VALUES(0, 1) IF NOT EXISTS; " +
                      "APPLY BATCH");

        execute("INSERT INTO %s (k, v) VALUES(1, 0) IF NOT EXISTS");
        assertInvalid("INSERT INTO %s (k, v) VALUES(1, 1) IF NOT EXISTS USING TIMESTAMP 5");

        // Counters: custom timestamps are not allowed either
        createTable("CREATE TABLE %s (k int PRIMARY KEY, c counter)");

        execute("UPDATE %s SET c = c + 1 WHERE k = 0");
        assertInvalid("UPDATE %s USING TIMESTAMP 10 SET c = c + 1 WHERE k = 0");

        execute("BEGIN COUNTER BATCH " +
                "UPDATE %1$s SET c = c + 1 WHERE k = 0; " +
                "UPDATE %1$s SET c = c + 1 WHERE k = 0; " +
                "APPLY BATCH");

        assertInvalid("BEGIN COUNTER BATCH " +
                      "UPDATE %1$s USING TIMESTAMP 3 SET c = c + 1 WHERE k = 0; " +
                      "UPDATE %1$s SET c = c + 1 WHERE k = 0; " +
                      "APPLY BATCH");

        assertInvalid("BEGIN COUNTER BATCH " +
                      "USING TIMESTAMP 3 UPDATE %1$s SET c = c + 1 WHERE k = 0; " +
                      "UPDATE %1$s SET c = c + 1 WHERE k = 0; " +
                      "APPLY BATCH");
    }

    @Test
    public void testInsertTimestampWithUnset() throws Throwable
    {
        createTable("CREATE TABLE %s (k int PRIMARY KEY, i int)");
        execute("INSERT INTO %s (k, i) VALUES (1, 1) USING TIMESTAMP ?", unset()); // treat as 'now'
    }

    @Test
    public void testTimestampsOnUnsetColumns() throws Throwable
    {
        createTable("CREATE TABLE %s (k int PRIMARY KEY, i int)");
        execute("INSERT INTO %s (k, i) VALUES (1, 1) USING TIMESTAMP 1;");
        execute("INSERT INTO %s (k) VALUES (2) USING TIMESTAMP 2;");
        execute("INSERT INTO %s (k, i) VALUES (3, 3) USING TIMESTAMP 1;");
        assertRows(execute("SELECT k, i, writetime(i) FROM %s "),
                   row(1, 1, 1L),
                   row(2, null, null),
                   row(3, 3, 1L));
    }

    @Test
    public void testTimestampsOnUnsetColumnsWide() throws Throwable
    {
        createTable("CREATE TABLE %s (k int , c int, i int, PRIMARY KEY (k, c))");
        execute("INSERT INTO %s (k, c, i) VALUES (1, 1, 1) USING TIMESTAMP 1;");
        execute("INSERT INTO %s (k, c) VALUES (1, 2) USING TIMESTAMP 1;");
        execute("INSERT INTO %s (k, c, i) VALUES (1, 3, 1) USING TIMESTAMP 1;");
        execute("INSERT INTO %s (k, c) VALUES (2, 2) USING TIMESTAMP 2;");
        execute("INSERT INTO %s (k, c, i) VALUES (3, 3, 3) USING TIMESTAMP 1;");
        assertRows(execute("SELECT k, c, i, writetime(i) FROM %s "),
                   row(1, 1, 1, 1L),
                   row(1, 2, null, null),
                   row(1, 3, 1, 1L),
                   row(2, 2, null, null),
                   row(3, 3, 3, 1L));
    }

    @Test
    public void testTimestampAndTTLPrepared() throws Throwable
    {
        createTable("CREATE TABLE %s (k int , c int, i int, PRIMARY KEY (k, c))");
        execute("INSERT INTO %s (k, c, i) VALUES (1, 1, 1) USING TIMESTAMP ? AND TTL ?;", 1L,5);
        execute("INSERT INTO %s (k, c) VALUES (1, 2) USING TIMESTAMP ? AND TTL ? ;", 1L, 5);
        execute("INSERT INTO %s (k, c, i) VALUES (1, 3, 1) USING TIMESTAMP ? AND TTL ?;", 1L, 5);
        execute("INSERT INTO %s (k, c) VALUES (2, 2) USING TIMESTAMP ? AND TTL ?;", 2L, 5);
        execute("INSERT INTO %s (k, c, i) VALUES (3, 3, 3) USING TIMESTAMP ? AND TTL ?;", 1L, 5);
        assertRows(execute("SELECT k, c, i, writetime(i) FROM %s "),
                   row(1, 1, 1, 1L),
                   row(1, 2, null, null),
                   row(1, 3, 1, 1L),
                   row(2, 2, null, null),
                   row(3, 3, 3, 1L));
        // wait for the 5s TTL to expire, then everything should be gone
        Thread.sleep(6*1000);
        assertEmpty(execute("SELECT k, c, i, writetime(i) FROM %s "));
    }

    @Test
    public void testTimestampAndTTLUpdatePrepared() throws Throwable
    {
        createTable("CREATE TABLE %s (k int , c int, i int, PRIMARY KEY (k, c))");
        execute("UPDATE %s USING TIMESTAMP ? AND TTL ? SET i=1 WHERE k=1 AND c = 1 ;", 1L, 5);
        execute("UPDATE %s USING TIMESTAMP ? AND TTL ? SET i=1 WHERE k=1 AND c = 3 ;", 1L, 5);
        execute("UPDATE %s USING TIMESTAMP ? AND TTL ? SET i=1 WHERE k=2 AND c = 2 ;", 2L, 5);
        execute("UPDATE %s USING TIMESTAMP ? AND TTL ? SET i=3 WHERE k=3 AND c = 3 ;", 1L, 5);
        assertRows(execute("SELECT k, c, i, writetime(i) FROM %s "),
                   row(1, 1, 1, 1L),
                   row(1, 3, 1, 1L),
                   row(2, 2, 1, 2L),
                   row(3, 3, 3, 1L));
        // wait for the 5s TTL to expire, then everything should be gone
        Thread.sleep(6*1000);
        assertEmpty(execute("SELECT k, c, i, writetime(i) FROM %s "));
    }
}
| |
/*
* Copyright (c) 2001, 2004, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package sun.reflect;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Constructor;
import java.lang.reflect.Modifier;
import java.security.AccessController;
import java.security.Permission;
import java.security.PrivilegedAction;
/** <P> The master factory for all reflective objects, both those in
java.lang.reflect (Fields, Methods, Constructors) as well as their
delegates (FieldAccessors, MethodAccessors, ConstructorAccessors).
</P>
<P> The methods in this class are extremely unsafe and can cause
subversion of both the language and the verifier. For this reason,
they are all instance methods, and access to the constructor of
this factory is guarded by a security check, in similar style to
{@link sun.misc.Unsafe}. </P>
*/
public class ReflectionFactory {
    // True once one-time configuration has been read — presumably flipped by
    // a lazy initializer outside this view; TODO confirm.
    private static boolean initted = false;
    // Permission checked by getReflectionFactory() before handing out the
    // factory (see its javadoc below).
    private static Permission reflectionFactoryAccessPerm
        = new RuntimePermission("reflectionFactoryAccess");
    // The sole factory instance; construction is private.
    private static ReflectionFactory soleInstance = new ReflectionFactory();
    // Provides access to package-private mechanisms in java.lang.reflect
    private static volatile LangReflectAccess langReflectAccess;
    //
    // "Inflation" mechanism. Loading bytecodes to implement
    // Method.invoke() and Constructor.newInstance() currently costs
    // 3-4x more than an invocation via native code for the first
    // invocation (though subsequent invocations have been benchmarked
    // to be over 20x faster). Unfortunately this cost increases
    // startup time for certain applications that use reflection
    // intensively (but only once per class) to bootstrap themselves.
    // To avoid this penalty we reuse the existing JVM entry points
    // for the first few invocations of Methods and Constructors and
    // then switch to the bytecode-based implementations.
    //
    // Package-private to be accessible to NativeMethodAccessorImpl
    // and NativeConstructorAccessorImpl
    private static boolean noInflation        = false;
    private static int     inflationThreshold = 15;
private ReflectionFactory() {
}
/**
* A convenience class for acquiring the capability to instantiate
* reflective objects. Use this instead of a raw call to {@link
* #getReflectionFactory} in order to avoid being limited by the
* permissions of your callers.
*
* <p>An instance of this class can be used as the argument of
* <code>AccessController.doPrivileged</code>.
*/
public static final class GetReflectionFactoryAction
implements PrivilegedAction {
public Object run() {
return getReflectionFactory();
}
}
/**
* Provides the caller with the capability to instantiate reflective
* objects.
*
* <p> First, if there is a security manager, its
* <code>checkPermission</code> method is called with a {@link
* java.lang.RuntimePermission} with target
* <code>"reflectionFactoryAccess"</code>. This may result in a
* security exception.
*
* <p> The returned <code>ReflectionFactory</code> object should be
* carefully guarded by the caller, since it can be used to read and
* write private data and invoke private methods, as well as to load
* unverified bytecodes. It must never be passed to untrusted code.
*
* @exception SecurityException if a security manager exists and its
* <code>checkPermission</code> method doesn't allow
* access to the RuntimePermission "reflectionFactoryAccess". */
public static ReflectionFactory getReflectionFactory() {
SecurityManager security = System.getSecurityManager();
if (security != null) {
// TO DO: security.checkReflectionFactoryAccess();
security.checkPermission(reflectionFactoryAccessPerm);
}
return soleInstance;
}
//--------------------------------------------------------------------------
//
// Routines used by java.lang.reflect
//
//
/** Called only by java.lang.reflect.Modifier's static initializer */
public void setLangReflectAccess(LangReflectAccess access) {
langReflectAccess = access;
}
/**
* Note: this routine can cause the declaring class for the field
* be initialized and therefore must not be called until the
* first get/set of this field.
* @param field the field
* @param override true if caller has overridden aaccessibility
*/
public FieldAccessor newFieldAccessor(Field field, boolean override) {
checkInitted();
return UnsafeFieldAccessorFactory.newFieldAccessor(field, override);
}
public MethodAccessor newMethodAccessor(Method method) {
checkInitted();
if (noInflation) {
return new MethodAccessorGenerator().
generateMethod(method.getDeclaringClass(),
method.getName(),
method.getParameterTypes(),
method.getReturnType(),
method.getExceptionTypes(),
method.getModifiers());
} else {
NativeMethodAccessorImpl acc =
new NativeMethodAccessorImpl(method);
DelegatingMethodAccessorImpl res =
new DelegatingMethodAccessorImpl(acc);
acc.setParent(res);
return res;
}
}
public ConstructorAccessor newConstructorAccessor(Constructor c) {
checkInitted();
Class declaringClass = c.getDeclaringClass();
if (Modifier.isAbstract(declaringClass.getModifiers())) {
return new InstantiationExceptionConstructorAccessorImpl(null);
}
if (declaringClass == Class.class) {
return new InstantiationExceptionConstructorAccessorImpl
("Can not instantiate java.lang.Class");
}
// Bootstrapping issue: since we use Class.newInstance() in
// the ConstructorAccessor generation process, we have to
// break the cycle here.
if (Reflection.isSubclassOf(declaringClass,
ConstructorAccessorImpl.class)) {
return new BootstrapConstructorAccessorImpl(c);
}
if (noInflation) {
return new MethodAccessorGenerator().
generateConstructor(c.getDeclaringClass(),
c.getParameterTypes(),
c.getExceptionTypes(),
c.getModifiers());
} else {
NativeConstructorAccessorImpl acc =
new NativeConstructorAccessorImpl(c);
DelegatingConstructorAccessorImpl res =
new DelegatingConstructorAccessorImpl(acc);
acc.setParent(res);
return res;
}
}
//--------------------------------------------------------------------------
//
// Routines used by java.lang
//
//
/** Creates a new java.lang.reflect.Field. Access checks as per
java.lang.reflect.AccessibleObject are not overridden. */
public Field newField(Class declaringClass,
String name,
Class type,
int modifiers,
int slot,
String signature,
byte[] annotations)
{
return langReflectAccess().newField(declaringClass,
name,
type,
modifiers,
slot,
signature,
annotations);
}
/** Creates a new java.lang.reflect.Method. Access checks as per
java.lang.reflect.AccessibleObject are not overridden. */
public Method newMethod(Class declaringClass,
String name,
Class[] parameterTypes,
Class returnType,
Class[] checkedExceptions,
int modifiers,
int slot,
String signature,
byte[] annotations,
byte[] parameterAnnotations,
byte[] annotationDefault)
{
return langReflectAccess().newMethod(declaringClass,
name,
parameterTypes,
returnType,
checkedExceptions,
modifiers,
slot,
signature,
annotations,
parameterAnnotations,
annotationDefault);
}
/** Creates a new java.lang.reflect.Constructor. Access checks as
per java.lang.reflect.AccessibleObject are not overridden. */
public Constructor newConstructor(Class declaringClass,
Class[] parameterTypes,
Class[] checkedExceptions,
int modifiers,
int slot,
String signature,
byte[] annotations,
byte[] parameterAnnotations)
{
return langReflectAccess().newConstructor(declaringClass,
parameterTypes,
checkedExceptions,
modifiers,
slot,
signature,
annotations,
parameterAnnotations);
}
/** Gets the MethodAccessor object for a java.lang.reflect.Method */
public MethodAccessor getMethodAccessor(Method m) {
return langReflectAccess().getMethodAccessor(m);
}
/** Sets the MethodAccessor object for a java.lang.reflect.Method */
public void setMethodAccessor(Method m, MethodAccessor accessor) {
langReflectAccess().setMethodAccessor(m, accessor);
}
/** Gets the ConstructorAccessor object for a
java.lang.reflect.Constructor */
public ConstructorAccessor getConstructorAccessor(Constructor c) {
return langReflectAccess().getConstructorAccessor(c);
}
/** Sets the ConstructorAccessor object for a
java.lang.reflect.Constructor */
public void setConstructorAccessor(Constructor c,
ConstructorAccessor accessor)
{
langReflectAccess().setConstructorAccessor(c, accessor);
}
/** Makes a copy of the passed method. The returned method is a
"child" of the passed one; see the comments in Method.java for
details. */
public Method copyMethod(Method arg) {
return langReflectAccess().copyMethod(arg);
}
/** Makes a copy of the passed field. The returned field is a
"child" of the passed one; see the comments in Field.java for
details. */
public Field copyField(Field arg) {
return langReflectAccess().copyField(arg);
}
/** Makes a copy of the passed constructor. The returned
constructor is a "child" of the passed one; see the comments
in Constructor.java for details. */
public Constructor copyConstructor(Constructor arg) {
return langReflectAccess().copyConstructor(arg);
}
//--------------------------------------------------------------------------
//
// Routines used by serialization
//
//
public Constructor newConstructorForSerialization
(Class classToInstantiate, Constructor constructorToCall)
{
// Fast path
if (constructorToCall.getDeclaringClass() == classToInstantiate) {
return constructorToCall;
}
ConstructorAccessor acc = new MethodAccessorGenerator().
generateSerializationConstructor(classToInstantiate,
constructorToCall.getParameterTypes(),
constructorToCall.getExceptionTypes(),
constructorToCall.getModifiers(),
constructorToCall.getDeclaringClass());
Constructor c = newConstructor(constructorToCall.getDeclaringClass(),
constructorToCall.getParameterTypes(),
constructorToCall.getExceptionTypes(),
constructorToCall.getModifiers(),
langReflectAccess().
getConstructorSlot(constructorToCall),
langReflectAccess().
getConstructorSignature(constructorToCall),
langReflectAccess().
getConstructorAnnotations(constructorToCall),
langReflectAccess().
getConstructorParameterAnnotations(constructorToCall));
setConstructorAccessor(c, acc);
return c;
}
//--------------------------------------------------------------------------
//
// Internals only below this point
//
static int inflationThreshold() {
return inflationThreshold;
}
/** We have to defer full initialization of this class until after
the static initializer is run since java.lang.reflect.Method's
static initializer (more properly, that for
java.lang.reflect.AccessibleObject) causes this class's to be
run, before the system properties are set up. */
private static void checkInitted() {
if (initted) return;
AccessController.doPrivileged(new PrivilegedAction() {
public Object run() {
// Tests to ensure the system properties table is fully
// initialized. This is needed because reflection code is
// called very early in the initialization process (before
// command-line arguments have been parsed and therefore
// these user-settable properties installed.) We assume that
// if System.out is non-null then the System class has been
// fully initialized and that the bulk of the startup code
// has been run.
if (System.out == null) {
// java.lang.System not yet fully initialized
return null;
}
String val = System.getProperty("sun.reflect.noInflation");
if (val != null && val.equals("true")) {
noInflation = true;
}
val = System.getProperty("sun.reflect.inflationThreshold");
if (val != null) {
try {
inflationThreshold = Integer.parseInt(val);
} catch (NumberFormatException e) {
throw (RuntimeException)
new RuntimeException("Unable to parse property sun.reflect.inflationThreshold").
initCause(e);
}
}
initted = true;
return null;
}
});
}
private static LangReflectAccess langReflectAccess() {
if (langReflectAccess == null) {
// Call a static method to get class java.lang.reflect.Modifier
// initialized. Its static initializer will cause
// setLangReflectAccess() to be called from the context of the
// java.lang.reflect package.
Modifier.isPublic(Modifier.PUBLIC);
}
return langReflectAccess;
}
}
| |
package net.scapeemulator.game.model.player.trade;
import net.scapeemulator.game.dispatcher.button.ButtonDispatcher;
import net.scapeemulator.game.model.ExtendedOption;
import net.scapeemulator.game.model.player.ScriptInputListenerAdapter;
import net.scapeemulator.game.model.player.Item;
import net.scapeemulator.game.model.player.Player;
import net.scapeemulator.game.model.player.interfaces.ComponentListener;
import net.scapeemulator.game.model.player.interfaces.InterfaceSet.Component;
import net.scapeemulator.game.model.player.inventory.*;
import net.scapeemulator.game.msg.impl.ScriptMessage;
import net.scapeemulator.game.msg.impl.inter.InterfaceAccessMessage;
import net.scapeemulator.game.msg.impl.inter.InterfaceVisibleMessage;
import net.scapeemulator.game.task.Action;
/**
* Represents a trade session between two players.
*
* @author David Insley
*/
public class TradeSession extends ComponentListener {

    static {
        // Register the button handler for trade interfaces once, on first use
        // of this class.
        ButtonDispatcher.getInstance().bind(new TradeInterfaceHandler());
    }

    // Client interface ids: first ("offer") screen, confirmation screen, and
    // the replacement inventory overlay shown during the trade.
    private static final int VERIFY_WINDOW = 334;
    private static final int TRADE_WINDOW = 335;
    private static final int TRADE_INVENTORY = 336;
    private static final String WHITE = "<col=FFFFFF>";
    private static final String ORANGE = "<col=FF9040>";
    private static final String WAITING = "Waiting for other player...";
    private static final String ACCEPTED = "Other player has accepted.";
    // Quantities for context-menu options ONE..FOUR; "All" is represented by
    // Integer.MAX_VALUE and clamped by the inventory remove logic.
    private static final int[] AMOUNTS = { 1, 5, 10, Integer.MAX_VALUE };

    // The two participants; each Player owns its own TradeSession, linked via
    // otherSession in init().
    private final Player player;
    private final Player otherPlayer;
    private TradeStatus status = TradeStatus.INIT;
    private TradeSession otherSession;
    // Working copy of the player's inventory (the real one is locked for the
    // duration of the trade) and the items currently offered.
    private Inventory inventory;
    private Inventory tradeInventory;

    public TradeSession(Player player, Player otherPlayer) {
        this.player = player;
        this.otherPlayer = otherPlayer;
    }

    /**
     * Opens the trade screen for this session's player: locks the real
     * inventory, builds the working and offer inventories, and wires up the
     * client interfaces. Assumes otherPlayer.getTradeSession() is already set.
     */
    public void init() {
        // Idle action whose stop() declines the trade if the player walks away
        // or is otherwise interrupted.
        player.startAction(new Action<Player>(player, 1, true) {
            @Override
            public void execute() {
            }

            @Override
            public void stop() {
                decline();
                super.stop();
            }
        });
        otherSession = otherPlayer.getTradeSession();

        // Configure player inventory
        player.getInventory().lock();
        inventory = new Inventory(player.getInventory());
        inventory.addListener(new InventorySpaceChangedListener());
        inventory.addListener(new InventoryMessageListener(player, -1, -1, 93));
        player.send(new InterfaceAccessMessage(TRADE_INVENTORY, 0, 0, 27, 1278));
        player.send(new ScriptMessage(150, "IviiiIsssssssss", "", "", "", "", "Offer-X", "Offer-All", "Offer-10", "Offer-5", "Offer", -1, 0, 7, 4, 93, TRADE_INVENTORY << 16));
        player.getInterfaceSet().openInventory(TRADE_INVENTORY);
        inventory.refresh();

        // Configure trade screen
        player.send(new InterfaceAccessMessage(TRADE_WINDOW, 30, 0, 27, 1278));
        player.send(new InterfaceAccessMessage(TRADE_WINDOW, 32, 0, 27, 1278));
        player.send(new ScriptMessage(150, "IviiiIsssssssss", "", "", "", "", "Remove-X", "Remove-All", "Remove-10", "Remove-5", "Remove", -1, 0, 7, 4, 90, TRADE_WINDOW << 16 | 30));
        player.send(new ScriptMessage(695, "IviiiIsssssssss", "", "", "", "", "", "", "", "", "", -1, 0, 7, 4, 90, TRADE_WINDOW << 16 | 32));
        player.setInterfaceText(TRADE_WINDOW, 15, "Trading with: " + otherPlayer.getDisplayName());
        player.setInterfaceText(TRADE_WINDOW, 36, "");
        tradeInventory = new Inventory(player, 28);
        tradeInventory.addListener(new InventoryMessageListener(player, -1, -1, 90));
        tradeInventory.addListener(new TradeItemsChangedListener());
        // Mirror this player's offer onto the other player's side of the screen.
        tradeInventory.addListener(new InventoryMessageListener(otherPlayer, -2, 60981, 90));
        player.getInterfaceSet().openWindow(TRADE_WINDOW, this);
        tradeInventory.refresh();
        status = TradeStatus.UPDATING_ITEMS;
    }

    /**
     * Switches this player to the confirmation screen, which lists both
     * offers as text. Called for both sessions once both players accept the
     * first screen.
     */
    private void secondWindow() {
        player.setInterfaceText(VERIFY_WINDOW, 37, itemsToString());
        player.setInterfaceText(VERIFY_WINDOW, 41, otherPlayer.getTradeSession().itemsToString());
        player.setInterfaceText(VERIFY_WINDOW, 45, "Trading with:<br>" + otherPlayer.getDisplayName());
        player.send(new InterfaceVisibleMessage(VERIFY_WINDOW, 37, true));
        player.send(new InterfaceVisibleMessage(VERIFY_WINDOW, 41, true));
        // player.send(new InterfaceVisibleMessage(VERIFY_WINDOW, 46, true)); TODO TRADE MODIFIED
        player.getInterfaceSet().openWindow(VERIFY_WINDOW);
        status = TradeStatus.VERIFYING;
    }

    /**
     * Handles a click on either trade window.
     *
     * @param windowId  interface the click came from (TRADE_WINDOW or VERIFY_WINDOW)
     * @param childId   component within the window
     * @param dynamicId offered-item slot for childId 30, otherwise unused
     * @param option    context-menu option used
     */
    public void handleInterfaceClick(int windowId, int childId, final int dynamicId, ExtendedOption option) {
        // Ignore stale clicks for a window that is no longer open.
        if (player.getInterfaceSet().getWindow().getCurrentId() != windowId) {
            return;
        }
        switch (windowId) {
        case VERIFY_WINDOW:
            switch (childId) {
            case 20:
                accept();
                break;
            case 8: // Close
            case 21: // Decline
                decline();
                break;
            }
            break;
        case TRADE_WINDOW:
            switch (childId) {
            case 16:
                accept();
                break;
            case 12: // Close
            case 18: // Decline
                decline();
                break;
            case 30:
                // Click on an item in this player's offer: remove it back to
                // the working inventory.
                if (dynamicId < 0 || dynamicId > 27) {
                    return;
                }
                final Item item = tradeInventory.get(dynamicId);
                if (item == null) {
                    return;
                }
                if (option == ExtendedOption.NINE) {
                    // Examine is allowed regardless of trade status.
                    player.sendMessage(item.getDefinition().getExamine());
                    return;
                }
                // Items may only move while the offers are still editable.
                if (status != TradeStatus.UPDATING_ITEMS && status != TradeStatus.WAITING_FIRST) {
                    return;
                }
                switch (option) {
                case ONE:
                case TWO:
                case THREE:
                case FOUR:
                    Item removed = tradeInventory.remove(new Item(item.getId(), AMOUNTS[option.toInteger()]), dynamicId);
                    inventory.add(removed);
                    break;
                case FIVE:
                    // Remove-X: prompt the client for a quantity.
                    player.getScriptInput().showIntegerScriptInput(new ScriptInputListenerAdapter() {
                        @Override
                        public void intInputReceived(int value) {
                            Item removed = tradeInventory.remove(new Item(item.getId(), value), dynamicId);
                            inventory.add(removed);
                            player.getScriptInput().reset();
                        }
                    });
                    break;
                default:
                    return;
                }
                break;
            }
            break;
        }
    }

    /**
     * Handles a click in the trade inventory overlay, offering the clicked
     * item. Mirrors the remove logic in handleInterfaceClick (childId 30).
     *
     * @param dynamicId slot in the working inventory
     * @param option    context-menu option used
     */
    public void handleInventoryClick(final int dynamicId, ExtendedOption option) {
        if (player.getInterfaceSet().getInventory().getCurrentId() != TRADE_INVENTORY) {
            return;
        }
        if (dynamicId < 0 || dynamicId > 27) {
            return;
        }
        final Item item = inventory.get(dynamicId);
        if (item == null) {
            return;
        }
        if (option == ExtendedOption.NINE) {
            player.sendMessage(item.getDefinition().getExamine());
            return;
        }
        if (status != TradeStatus.UPDATING_ITEMS && status != TradeStatus.WAITING_FIRST) {
            return;
        }
        switch (option) {
        case ONE:
        case TWO:
        case THREE:
        case FOUR:
            Item removed = inventory.remove(new Item(item.getId(), AMOUNTS[option.toInteger()]), dynamicId);
            tradeInventory.add(removed);
            break;
        case FIVE:
            // Offer-X: prompt the client for a quantity.
            player.getScriptInput().showIntegerScriptInput(new ScriptInputListenerAdapter() {
                @Override
                public void intInputReceived(int value) {
                    Item removed = inventory.remove(new Item(item.getId(), value), dynamicId);
                    tradeInventory.add(removed);
                    player.getScriptInput().reset();
                }
            });
            break;
        default:
            return;
        }
    }

    /**
     * Advances this side of the trade when the player clicks accept. On the
     * first screen the second acceptance locks both offers and opens the
     * confirmation screen; on the confirmation screen the second acceptance
     * exchanges the items (if both sides still fit) and closes the trade.
     */
    private void accept() {
        switch (status) {
        case INIT:
        case WAITING_FIRST:
        case WAITING_FINISH:
            // Already accepted (or not yet trading); nothing to do.
            break;
        case UPDATING_ITEMS:
            if (otherSession.status == TradeStatus.WAITING_FIRST) {
                // Both players have now accepted the first screen.
                tradeInventory.lock();
                otherSession.tradeInventory.lock();
                secondWindow();
                otherSession.secondWindow();
            } else {
                status = TradeStatus.WAITING_FIRST;
                player.setInterfaceText(TRADE_WINDOW, 36, WAITING);
                otherPlayer.setInterfaceText(TRADE_WINDOW, 36, ACCEPTED);
            }
            break;
        case VERIFYING:
            if (otherSession.status == TradeStatus.WAITING_FINISH) {
                // Both players confirmed; re-validate before exchanging.
                if (canComplete() && otherSession.canComplete()) {
                    player.sendMessage("Trade accepted.");
                    otherPlayer.sendMessage("Trade accepted.");
                    player.getInventory().unlock();
                    player.getInventory().removeAll(tradeInventory.toArray());
                    player.getInventory().addAll(otherSession.tradeInventory.toArray());
                    otherPlayer.getInventory().unlock();
                    otherPlayer.getInventory().removeAll(otherSession.tradeInventory.toArray());
                    otherPlayer.getInventory().addAll(tradeInventory.toArray());
                }
                closeTrade();
                otherSession.closeTrade();
            } else {
                status = TradeStatus.WAITING_FINISH;
                player.setInterfaceText(VERIFY_WINDOW, 33, WAITING);
                otherPlayer.setInterfaceText(VERIFY_WINDOW, 33, ACCEPTED);
            }
            break;
        }
    }

    /**
     * Verifies the exchange is still possible for this side: the player must
     * still own everything offered and have room for the incoming items.
     * Sends explanatory messages to both players on failure.
     */
    private boolean canComplete() {
        /*
         * Copy the players inventory into a temporary one we can manipulate. The players inventory
         * shouldn't have been modified since the start of the trade because we locked it.
         */
        Inventory temp = new Inventory(player.getInventory());
        if (!temp.removeAll(tradeInventory.toArray()).isEmpty()) {
            player.sendMessage("There was a problem with the trade. Please try again.");
            otherPlayer.sendMessage("There was a problem with the trade. Please try again.");
            return false;
        }
        if (!temp.addAll(otherSession.tradeInventory.toArray()).isEmpty()) {
            player.sendMessage("You do not have enough free inventory space to complete that transaction.");
            otherPlayer.sendMessage("Other player does not have enough free inventory space.");
            return false;
        }
        return true;
    }

    /**
     * Called when the player closes the interface or clicks decline.
     */
    private void decline() {
        player.sendMessage("Trade cancelled.");
        otherPlayer.sendMessage("Other player declined the trade.");
        closeTrade();
        otherSession.closeTrade();
    }

    /**
     * Tears down this side of the trade: closes the interfaces, restores and
     * unlocks the real inventory (offered items were never removed from it),
     * and detaches the session from the player.
     */
    private void closeTrade() {
        player.getInterfaceSet().getWindow().removeListener();
        player.getInterfaceSet().closeInventory();
        player.getInterfaceSet().closeWindow();
        player.getScriptInput().reset();
        player.getInventory().unlock();
        player.getInventory().refresh();
        player.setTradeSession(null);
    }

    /**
     * Builds the colour-tagged text listing of this side's offer for the
     * confirmation screen.
     */
    private String itemsToString() {
        // TODO two columns?
        if (tradeInventory.isEmpty()) {
            return WHITE + "Absolutely nothing!";
        }
        String string = "";
        for (Item item : tradeInventory.toArray()) {
            if (item == null) {
                continue;
            }
            string += ORANGE + item.getDefinition().getName();
            string += (item.getAmount() == 1 ? "" : (WHITE + " x " + item.getAmount())) + "<br>";
        }
        return string;
    }

    /**
     * Listens to this player's offer inventory: clears any acceptance state
     * on both sides whenever the offer changes (a modified offer must be
     * re-accepted by both players).
     */
    private class TradeItemsChangedListener implements InventoryListener {

        @Override
        public void itemChanged(Inventory inventory, int slot, Item item, Item oldItem) {
            if (oldItem != null) {
                // Tell the other client to flash/refresh the changed slot.
                otherPlayer.send(new ScriptMessage(143, "Iiii", slot, 7, 4, TRADE_WINDOW << 16 | 33));
            }
            itemsChanged(inventory);
        }

        @Override
        public void itemsChanged(Inventory inventory) {
            player.setInterfaceText(TRADE_WINDOW, 36, "");
            otherPlayer.setInterfaceText(TRADE_WINDOW, 36, "");
            status = TradeStatus.UPDATING_ITEMS;
            otherSession.status = TradeStatus.UPDATING_ITEMS;
        }

        @Override
        public void capacityExceeded(Inventory inventory) {
        }
    }

    /**
     * Listens to this player's working inventory and keeps the other player
     * informed of how many free slots remain.
     */
    private class InventorySpaceChangedListener implements InventoryListener {

        @Override
        public void itemChanged(Inventory inventory, int slot, Item item, Item oldItem) {
            itemsChanged(inventory);
        }

        @Override
        public void itemsChanged(Inventory inventory) {
            otherPlayer.setInterfaceText(TRADE_WINDOW, 21, player.getDisplayName() + " has " + inventory.freeSlots() + " free inventory slots.");
        }

        @Override
        public void capacityExceeded(Inventory inventory) {
        }
    }

    @Override
    public void inputPressed(Component component, int componentId, int dynamicId) {
    }

    @Override
    public void componentClosed(Component component) {
        // Closing the trade window counts as declining.
        decline();
    }

    @Override
    public boolean componentChanged(Component component, int oldId) {
        // Only the transition to the confirmation window is allowed; any
        // other interface replacing the trade screen declines the trade.
        if (component.getCurrentId() != VERIFY_WINDOW) {
            decline();
            return false;
        }
        return true;
    }
}
| |
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.android.sunshine.app;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Rect;
import android.graphics.Typeface;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.wearable.watchface.CanvasWatchFaceService;
import android.support.wearable.watchface.WatchFaceStyle;
import android.text.format.DateFormat;
import android.util.Log;
import android.view.Gravity;
import android.view.SurfaceHolder;
import android.view.WindowInsets;
import android.widget.Toast;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.api.GoogleApiClient;
import com.google.android.gms.wearable.Asset;
import com.google.android.gms.wearable.DataApi;
import com.google.android.gms.wearable.DataEvent;
import com.google.android.gms.wearable.DataEventBuffer;
import com.google.android.gms.wearable.DataMap;
import com.google.android.gms.wearable.DataMapItem;
import com.google.android.gms.wearable.PutDataMapRequest;
import com.google.android.gms.wearable.Wearable;
import java.io.InputStream;
import java.lang.ref.WeakReference;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.TimeZone;
import java.util.concurrent.TimeUnit;
/**
* Digital watch face with seconds. In ambient mode, the seconds aren't displayed. On devices with
* low-bit ambient mode, the text is drawn without anti-aliasing in ambient mode.
*/
public class MyWatchFace extends CanvasWatchFaceService {
    // Typeface used for every piece of text on the face.
    private static final Typeface NORMAL_TYPEFACE =
            Typeface.create(Typeface.SANS_SERIF, Typeface.NORMAL);

    // Data-layer path and keys shared with the handheld app's sync service.
    private static final String PATH = "/weather";
    private static final String LOG_TAG = "MyWatchFace";
    private static final String TEMP_HIGH = "TEMP_HIGH";
    private static final String TEMP_LOW = "TEMP_LOW";
    private static final String WEATHER_ICON = "WEATHER_ICON";

    // Latest weather received over the data layer; null until the first sync.
    private String high;
    private String low;
    private Bitmap bitmap;

    /**
     * Update rate in milliseconds for interactive mode. We update once a second since seconds are
     * displayed in interactive mode.
     */
    private static final long INTERACTIVE_UPDATE_RATE_MS = TimeUnit.SECONDS.toMillis(1);

    /**
     * Handler message id for updating the time periodically in interactive mode.
     */
    private static final int MSG_UPDATE_TIME = 0;
@Override
public Engine onCreateEngine() {
return new Engine();
}
private static class EngineHandler extends Handler {
private final WeakReference<MyWatchFace.Engine> mWeakReference;
public EngineHandler(MyWatchFace.Engine reference) {
mWeakReference = new WeakReference<>(reference);
}
@Override
public void handleMessage(Message msg) {
MyWatchFace.Engine engine = mWeakReference.get();
if (engine != null) {
switch (msg.what) {
case MSG_UPDATE_TIME:
engine.handleUpdateTimeMessage();
break;
}
}
}
}
private class Engine extends CanvasWatchFaceService.Engine
implements GoogleApiClient.ConnectionCallbacks, GoogleApiClient.OnConnectionFailedListener, DataApi.DataListener {
        // Drives the once-per-second invalidate() while interactive.
        final Handler mUpdateTimeHandler = new EngineHandler(this);
        boolean mRegisteredTimeZoneReceiver = false;
        // Paints for the background, time, date line, and temperatures.
        Paint mBackgroundPaint;
        Paint mTextPaint;
        Paint mDatePaint;
        Paint mTempHighPaint;
        Paint mTempLowPaint;
        boolean mAmbient;
        Calendar mCalendar;
        // Keeps mCalendar in sync when the device's time zone changes.
        final BroadcastReceiver mTimeZoneReceiver = new BroadcastReceiver() {
            @Override
            public void onReceive(Context context, Intent intent) {
                mCalendar.setTimeZone(TimeZone.getDefault());
                invalidate();
            }
        };
        float mXOffset;
        float mYOffset;
        // Connection to the Wearable Data Layer; built in onCreate().
        GoogleApiClient mGoogleApiClient;

        /**
         * Whether the display supports fewer bits for each color in ambient mode. When true, we
         * disable anti-aliasing in ambient mode.
         */
        boolean mLowBitAmbient;
@Override
public void onCreate(SurfaceHolder holder) {
super.onCreate(holder);
setWatchFaceStyle(new WatchFaceStyle.Builder(MyWatchFace.this)
.setCardPeekMode(WatchFaceStyle.PEEK_MODE_VARIABLE)
.setBackgroundVisibility(WatchFaceStyle.BACKGROUND_VISIBILITY_INTERRUPTIVE)
.setHotwordIndicatorGravity(Gravity.CENTER)
.setShowSystemUiTime(false)
.setAcceptsTapEvents(true)
.build());
Resources resources = MyWatchFace.this.getResources();
mYOffset = resources.getDimension(R.dimen.digital_y_offset);
mBackgroundPaint = new Paint();
mBackgroundPaint.setColor(resources.getColor(R.color.background));
mTextPaint = new Paint();
mTextPaint = createTextPaint(resources.getColor(R.color.digital_text));
mCalendar = Calendar.getInstance();
mDatePaint = new Paint();
mDatePaint= createTextPaint(resources.getColor(R.color.digital_gray));
mTempHighPaint = new Paint();
mTempHighPaint= createTextPaint(resources.getColor(R.color.digital_text));
mTempLowPaint = new Paint();
mTempLowPaint = createTextPaint(resources.getColor(R.color.digital_gray));
mGoogleApiClient = new GoogleApiClient.Builder(MyWatchFace.this)
.addConnectionCallbacks(this)
.addOnConnectionFailedListener(this)
.addApi(Wearable.API)
.build();
mGoogleApiClient.connect();
}
@Override
public void onDestroy() {
mUpdateTimeHandler.removeMessages(MSG_UPDATE_TIME);
super.onDestroy();
//mGoogleApiClient.disconnect();
}
private Paint createTextPaint(int textColor) {
Paint paint = new Paint();
paint.setColor(textColor);
paint.setTypeface(NORMAL_TYPEFACE);
paint.setAntiAlias(true);
return paint;
}
    /**
     * Tracks visibility: registers/unregisters the time-zone receiver,
     * detaches the data listener while hidden, and restarts the tick timer.
     */
    @Override
    public void onVisibilityChanged(boolean visible) {
        super.onVisibilityChanged(visible);

        if (visible) {
            registerReceiver();
            // Update time zone in case it changed while we weren't visible.
            mCalendar.setTimeZone(TimeZone.getDefault());
            invalidate();
        } else {
            unregisterReceiver();
            // Stop receiving data-layer events while hidden; the client
            // itself stays connected so weather sync resumes instantly.
            if (mGoogleApiClient != null && mGoogleApiClient.isConnected()) {
                Wearable.DataApi.removeListener(mGoogleApiClient, this);
                //mGoogleApiClient.disconnect();
            }
        }

        // Whether the timer should be running depends on whether we're visible (as well as
        // whether we're in ambient mode), so we may need to start or stop the timer.
        updateTimer();
    }
private void registerReceiver() {
if (mRegisteredTimeZoneReceiver) {
return;
}
mRegisteredTimeZoneReceiver = true;
IntentFilter filter = new IntentFilter(Intent.ACTION_TIMEZONE_CHANGED);
MyWatchFace.this.registerReceiver(mTimeZoneReceiver, filter);
// mGoogleApiClient.connect();
}
private void unregisterReceiver() {
if (!mRegisteredTimeZoneReceiver) {
return;
}
mRegisteredTimeZoneReceiver = false;
MyWatchFace.this.unregisterReceiver(mTimeZoneReceiver);
}
@Override
public void onApplyWindowInsets(WindowInsets insets) {
super.onApplyWindowInsets(insets);
// Load resources that have alternate values for round watches.
Resources resources = MyWatchFace.this.getResources();
boolean isRound = insets.isRound();
}
    /**
     * Caches hardware properties; mLowBitAmbient decides whether text
     * anti-aliasing must be disabled in ambient mode.
     */
    @Override
    public void onPropertiesChanged(Bundle properties) {
        super.onPropertiesChanged(properties);
        mLowBitAmbient = properties.getBoolean(PROPERTY_LOW_BIT_AMBIENT, false);
    }
    /**
     * System tick (at least once per minute in ambient mode); redraw so the
     * displayed time stays current.
     */
    @Override
    public void onTimeTick() {
        super.onTimeTick();
        invalidate();
    }
@Override
public void onAmbientModeChanged(boolean inAmbientMode) {
super.onAmbientModeChanged(inAmbientMode);
if (mAmbient != inAmbientMode) {
mAmbient = inAmbientMode;
if (mLowBitAmbient) {
mTextPaint.setAntiAlias(!inAmbientMode);
}
invalidate();
}
// Whether the timer should be running depends on whether we're visible (as well as
// whether we're in ambient mode), so we may need to start or stop the timer.
updateTimer();
}
/**
* Captures tap event (and tap type) and toggles the background color if the user finishes
* a tap.
*/
@Override
public void onTapCommand(int tapType, int x, int y, long eventTime) {
switch (tapType) {
case TAP_TYPE_TOUCH:
// The user has started touching the screen.
break;
case TAP_TYPE_TOUCH_CANCEL:
// The user has started a different gesture or otherwise cancelled the tap.
break;
case TAP_TYPE_TAP:
// The user has completed the tap gesture.
// TODO: Add code to handle the tap gesture.
Toast.makeText(getApplicationContext(), R.string.message, Toast.LENGTH_SHORT)
.show();
break;
}
invalidate();
}
@Override
public void onDraw(Canvas canvas, Rect bounds) {
// Draw the background.
if (isInAmbientMode()) {
canvas.drawColor(Color.BLACK);
} else {
canvas.drawRect(0, 0, bounds.width(), bounds.height(), mBackgroundPaint);
}
// Draw H:MM in ambient mode or H:MM:SS in interactive mode.
long now = System.currentTimeMillis();
mCalendar.setTimeInMillis(now);
boolean is24Hour = DateFormat.is24HourFormat(MyWatchFace.this);
String text;
if(is24Hour){
text =
String.format("%d:%02d", mCalendar.get(Calendar.HOUR_OF_DAY),
mCalendar.get(Calendar.MINUTE));
}
else{
text =
String.format("%d:%02d", mCalendar.get(Calendar.HOUR),
mCalendar.get(Calendar.MINUTE));
}
mYOffset = bounds.height()/4;
mTextPaint.setTextSize(bounds.height()/6);
canvas.drawText(text, bounds.centerX()-mTextPaint.measureText(text)/2, mYOffset, mTextPaint);
Calendar c = Calendar.getInstance();
SimpleDateFormat dateFormat= new SimpleDateFormat("EEE, MMM dd yyyy");
String date = dateFormat.format(c.getTime());
String datetext = mAmbient ? "":date;
mDatePaint.setTextSize(bounds.height()/10);
canvas.drawText(datetext, bounds.centerX()-mDatePaint.measureText(datetext)/2, mYOffset+50, mDatePaint);
canvas.drawLine(bounds.centerX()-bounds.width()/6, mYOffset+90, bounds.centerX()+bounds.width()/6, mYOffset+90, mDatePaint);
mTempHighPaint.setTextSize(bounds.height()/8);
mTempLowPaint.setTextSize(bounds.height()/8);
if(high !=null && low!=null ) {
canvas.drawText(high, bounds.centerX() - 30, bounds.centerY()+100, mTempHighPaint);
canvas.drawText(low, bounds.centerX() +mTempHighPaint.measureText(high) ,bounds.centerY()+100, mTempLowPaint);
}
if(bitmap!=null){
canvas.drawBitmap(bitmap, bounds.width()/10, bounds.centerY()+30, mTempHighPaint);
}
}
/**
* Starts the {@link #mUpdateTimeHandler} timer if it should be running and isn't currently
* or stops it if it shouldn't be running but currently is.
*/
private void updateTimer() {
    // Clear any queued tick first so we never double-schedule updates.
    mUpdateTimeHandler.removeMessages(MSG_UPDATE_TIME);
    if (shouldTimerBeRunning()) {
        // Kick off an immediate tick; handleUpdateTimeMessage re-schedules itself.
        mUpdateTimeHandler.sendEmptyMessage(MSG_UPDATE_TIME);
    }
}
/**
* Returns whether the {@link #mUpdateTimeHandler} timer should be running. The timer should
* only run when we're visible and in interactive mode.
*/
private boolean shouldTimerBeRunning() {
    // Per-second updates are only needed while the face is on screen;
    // in ambient mode the system drives (less frequent) redraws itself.
    if (!isVisible()) {
        return false;
    }
    return !isInAmbientMode();
}
/**
* Handle updating the time periodically in interactive mode.
*/
private void handleUpdateTimeMessage() {
    // Redraw now, then (while interactive) schedule the next tick so that
    // redraws stay phase-aligned to INTERACTIVE_UPDATE_RATE_MS boundaries.
    invalidate();
    if (shouldTimerBeRunning()) {
        long nowMs = System.currentTimeMillis();
        long untilNextTickMs =
                INTERACTIVE_UPDATE_RATE_MS - (nowMs % INTERACTIVE_UPDATE_RATE_MS);
        mUpdateTimeHandler.sendEmptyMessageDelayed(MSG_UPDATE_TIME, untilNextTickMs);
    }
}
@Override
public void onConnected(@Nullable Bundle bundle) {
    // Start listening for weather data pushed from the phone app.
    Wearable.DataApi.addListener(mGoogleApiClient, this);
    Log.d(LOG_TAG, "GoogleApi onConnected");
}
@Override
public void onConnectionSuspended(int i) {
    Log.d(LOG_TAG, "GoogleApi onConnectionSuspended");
    // Attempt to re-establish the connection immediately.
    mGoogleApiClient.connect();
}
@Override
public void onConnectionFailed(@NonNull ConnectionResult connectionResult) {
    // NOTE(review): failure is only logged; the face keeps showing stale/absent
    // weather data. Consider surfacing or retrying — confirm desired behavior.
    Log.d(LOG_TAG, "GoogleApi onConnectionFailed");
}
@Override
public void onDataChanged(DataEventBuffer dataEvents) {
    // Receives weather updates (high/low temperature strings and a weather
    // condition id) pushed by the companion phone app over the Data API.
    Log.d(LOG_TAG, "onDataChanged");
    for (DataEvent dataEvent : dataEvents) {
        if (dataEvent.getType() == DataEvent.TYPE_CHANGED) {
            DataMap dataMap = DataMapItem.fromDataItem(dataEvent.getDataItem()).getDataMap();
            String path = dataEvent.getDataItem().getUri().getPath();
            Log.d(LOG_TAG, path + dataMap.size());
            if (dataMap.containsKey(TEMP_HIGH)) {
                high = dataMap.getString(TEMP_HIGH);
                Log.d(LOG_TAG, "High= " + high);
            } else {
                Log.d(LOG_TAG, "No high temp");
            }
            if (dataMap.containsKey(TEMP_LOW)) {
                low = dataMap.getString(TEMP_LOW);
                Log.d(LOG_TAG, "Low = " + low);
            } else {
                Log.d(LOG_TAG, "No low temp");
            }
            // NOTE(review): uses the string literal "WEATHER_ID" while the
            // temperatures use TEMP_* constants — confirm the sender uses the
            // same literal and consider extracting a shared constant.
            if (dataMap.containsKey("WEATHER_ID")) {
                int weatherid = dataMap.getInt("WEATHER_ID");
                // Map the OpenWeatherMap-style condition id to a drawable and
                // pre-scale it once so onDraw stays cheap.
                bitmap = BitmapFactory.decodeResource(getResources(), WatchUtility.getArtResourceForWeatherCondition(weatherid));
                bitmap = Bitmap.createScaledBitmap(bitmap, 90, 90, false);
                Log.d(LOG_TAG, "weather id" + dataMap.getInt("WEATHER_ID"));
                //Asset profileAsset = dataMap.getAsset(WEATHER_ICON);
                //Bitmap bitmap = loadBitmapFromAsset(profileAsset);
            } else {
                Log.d(LOG_TAG, "no icon");
            }
        }
        // Redraw with whatever fields were updated by this event.
        invalidate();
    }
}
}
}
| |
/*
* Copyright 2014-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.apple;
import com.dd.plist.NSNumber;
import com.dd.plist.NSObject;
import com.dd.plist.NSString;
import com.facebook.buck.cxx.CxxPlatform;
import com.facebook.buck.cxx.CxxPreprocessorInput;
import com.facebook.buck.cxx.HeaderVisibility;
import com.facebook.buck.cxx.NativeTestable;
import com.facebook.buck.log.Logger;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargets;
import com.facebook.buck.model.Either;
import com.facebook.buck.rules.AbstractBuildRule;
import com.facebook.buck.rules.AddToRuleKey;
import com.facebook.buck.rules.BuildContext;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildRuleParams;
import com.facebook.buck.rules.BuildableContext;
import com.facebook.buck.rules.HasPostBuildSteps;
import com.facebook.buck.rules.SourcePath;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.TargetGraph;
import com.facebook.buck.rules.Tool;
import com.facebook.buck.shell.DefaultShellStep;
import com.facebook.buck.step.ExecutionContext;
import com.facebook.buck.step.Step;
import com.facebook.buck.step.fs.CopyStep;
import com.facebook.buck.step.fs.FindAndReplaceStep;
import com.facebook.buck.step.fs.MakeCleanDirectoryStep;
import com.facebook.buck.step.fs.MkdirStep;
import com.facebook.buck.step.fs.WriteFileStep;
import com.facebook.buck.util.HumanReadableException;
import com.facebook.buck.util.ProcessExecutor;
import com.facebook.buck.util.ProcessExecutorParams;
import com.google.common.base.Function;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.io.Files;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Iterator;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
/**
* Creates a bundle: a directory containing files and subdirectories, described by an Info.plist.
*/
public class AppleBundle extends AbstractBuildRule implements HasPostBuildSteps, NativeTestable {
/** Controls how much debug information is preserved for the bundled binary. */
public enum DebugInfoFormat {
    /**
     * Produces a binary with the debug map stripped.
     */
    NONE,
    /**
     * Generate a .dSYM file from the binary and its constituent object files.
     */
    DSYM,
}
private static final Logger LOG = Logger.get(AppleBundle.class);
// Xcode build-setting names recognized in Info.plist substitutions.
private static final String CODE_SIGN_ENTITLEMENTS = "CODE_SIGN_ENTITLEMENTS";
private static final String CODE_SIGN_IDENTITY = "CODE_SIGN_IDENTITY";
// Fields annotated @AddToRuleKey contribute to this rule's cache key; changing
// any of them invalidates cached build results for the bundle.
@AddToRuleKey
private final String extension;
@AddToRuleKey
private final SourcePath infoPlist;
@AddToRuleKey
private final ImmutableMap<String, String> infoPlistSubstitutions;
@AddToRuleKey
private final Optional<BuildRule> binary;
@AddToRuleKey
private final AppleBundleDestinations destinations;
@AddToRuleKey
private final Set<SourcePath> resourceDirs;
@AddToRuleKey
private final Set<SourcePath> resourceFiles;
@AddToRuleKey
private final Set<SourcePath> dirsContainingResourceDirs;
@AddToRuleKey
private final Optional<ImmutableSet<SourcePath>> resourceVariantFiles;
@AddToRuleKey
private final Tool ibtool;
@AddToRuleKey
private final Tool dsymutil;
@AddToRuleKey
private final Tool strip;
@AddToRuleKey
private final ImmutableSortedSet<BuildTarget> tests;
@AddToRuleKey
private final String platformName;
@AddToRuleKey
private final String sdkName;
// Resolved at construction time so they can participate in the rule key.
@AddToRuleKey
private final Optional<ImmutableSet<ProvisioningProfileMetadata>> provisioningProfiles;
@AddToRuleKey
private final Optional<CodeSignIdentity> codeSignIdentity;
@AddToRuleKey
private final DebugInfoFormat debugInfoFormat;
// Not part of the rule key.
private final ImmutableSet<SourcePath> extensionBundlePaths;
private final Optional<AppleAssetCatalog> assetCatalog;
// Paths derived from the build target in the constructor.
private final String binaryName;
private final Path bundleRoot;
private final Path binaryPath;
private final Path bundleBinaryPath;
private final Path dsymPath;
private final boolean hasBinary;
/**
 * Builds an {@link AppleBundle}. Provisioning profiles and the code signing
 * identity are resolved eagerly here (not in getBuildSteps) because they are
 * {@code @AddToRuleKey} fields and must be stable parts of the rule key.
 *
 * @throws HumanReadableException if the platform requires code signing but no
 *     compatible identity can be found.
 */
AppleBundle(
    BuildRuleParams params,
    SourcePathResolver resolver,
    Either<AppleBundleExtension, String> extension,
    SourcePath infoPlist,
    Map<String, String> infoPlistSubstitutions,
    Optional<BuildRule> binary,
    AppleBundleDestinations destinations,
    Set<SourcePath> resourceDirs,
    Set<SourcePath> resourceFiles,
    Set<SourcePath> dirsContainingResourceDirs,
    ImmutableSet<SourcePath> extensionBundlePaths,
    Optional<ImmutableSet<SourcePath>> resourceVariantFiles,
    Tool ibtool,
    Tool dsymutil,
    Tool strip,
    Optional<AppleAssetCatalog> assetCatalog,
    Set<BuildTarget> tests,
    AppleSdk sdk,
    ImmutableSet<CodeSignIdentity> allValidCodeSignIdentities,
    Optional<SourcePath> provisioningProfileSearchPath,
    DebugInfoFormat debugInfoFormat) {
  super(params, resolver);
  // A well-known extension enum renders as its canonical file extension;
  // otherwise the caller-supplied custom string is used verbatim.
  this.extension = extension.isLeft() ?
      extension.getLeft().toFileExtension() :
      extension.getRight();
  this.infoPlist = infoPlist;
  this.infoPlistSubstitutions = ImmutableMap.copyOf(infoPlistSubstitutions);
  this.binary = binary;
  this.destinations = destinations;
  this.resourceDirs = resourceDirs;
  this.resourceFiles = resourceFiles;
  this.dirsContainingResourceDirs = dirsContainingResourceDirs;
  this.extensionBundlePaths = extensionBundlePaths;
  this.resourceVariantFiles = resourceVariantFiles;
  this.ibtool = ibtool;
  this.dsymutil = dsymutil;
  this.strip = strip;
  this.assetCatalog = assetCatalog;
  this.binaryName = getBinaryName(getBuildTarget());
  this.bundleRoot = getBundleRoot(getBuildTarget(), this.extension);
  this.binaryPath = this.destinations.getExecutablesPath()
      .resolve(this.binaryName);
  this.tests = ImmutableSortedSet.copyOf(tests);
  this.platformName = sdk.getApplePlatform().getName();
  this.sdkName = sdk.getName();
  this.debugInfoFormat = debugInfoFormat;

  // We need to resolve the possible set of profiles and code sign identity at construction time
  // because they form part of the rule key.
  if (binary.isPresent() && ApplePlatform.needsCodeSign(this.platformName)) {
    final Path searchPath;
    if (provisioningProfileSearchPath.isPresent()) {
      searchPath = resolver.getResolvedPath(provisioningProfileSearchPath.get());
    } else {
      // Default to Xcode's per-user provisioning profile directory.
      searchPath = Paths.get(System.getProperty("user.home") +
          "/Library/MobileDevice/Provisioning Profiles");
    }
    Optional<ImmutableSet<ProvisioningProfileMetadata>> provisioningProfiles;
    try {
      provisioningProfiles = Optional.of(
          ProvisioningProfileCopyStep.findProfilesInPath(searchPath));
    } catch (InterruptedException e) {
      // We get here if the user pressed Ctrl-C during the profile discovery step.
      // In this case, we'll fail anyway since the set of profiles will be empty.
      provisioningProfiles = Optional.of(ImmutableSet.<ProvisioningProfileMetadata>of());
    }
    this.provisioningProfiles = provisioningProfiles;

    // Prefer an identity named by CODE_SIGN_IDENTITY (hash or name prefix);
    // fall back to the first valid identity on the machine.
    Optional<CodeSignIdentity> foundIdentity = Optional.absent();
    Optional<String> customIdentity = InfoPlistSubstitution.getVariableExpansionForPlatform(
        CODE_SIGN_IDENTITY,
        this.platformName,
        this.infoPlistSubstitutions);
    if (customIdentity.isPresent()) {
      LOG.debug("Bundle specifies custom code signing identity: " + customIdentity.get());
      if (CodeSignIdentity.isHash(customIdentity.get())) {
        for (CodeSignIdentity identity : allValidCodeSignIdentities) {
          if (identity.getHash().equals(customIdentity.get())) {
            foundIdentity = Optional.of(identity);
            break;
          }
        }
      } else {
        for (CodeSignIdentity identity : allValidCodeSignIdentities) {
          if (identity.getFullName().startsWith(customIdentity.get())) {
            foundIdentity = Optional.of(identity);
            break;
          }
        }
      }
    } else if (!allValidCodeSignIdentities.isEmpty()) {
      LOG.debug("Using default code signing identity");
      Iterator<CodeSignIdentity> it = allValidCodeSignIdentities.iterator();
      foundIdentity = Optional.of(it.next());
    }
    if (!foundIdentity.isPresent()) {
      throw new HumanReadableException("The platform " + platformName + " for this target " +
          "requires code signing but couldn't find a compatible code signing identity to use.");
    }
    // Fixed: previously logged the Optional wrapper ("Optional.of(...)").
    // foundIdentity is guaranteed present here by the check above.
    LOG.debug("Code signing identity is " + foundIdentity.get());
    this.codeSignIdentity = foundIdentity;
  } else {
    this.provisioningProfiles = Optional.absent();
    this.codeSignIdentity = Optional.absent();
  }

  bundleBinaryPath = bundleRoot.resolve(binaryPath);
  // The .dSYM sits next to the bundle, named after the binary.
  dsymPath = bundleBinaryPath
      .getParent()
      .getParent()
      .resolve(bundleBinaryPath.getFileName().toString() + ".dSYM");
  hasBinary = binary.isPresent() && binary.get().getPathToOutput() != null;
}
/** Returns the name of the bundled binary, derived from the build target. */
public static String getBinaryName(BuildTarget buildTarget) {
    return buildTarget.getShortName();
}
/** Returns the generated path of the bundle root, e.g. {@code buck-out/gen/.../Name.app}. */
public static Path getBundleRoot(BuildTarget buildTarget, String extension) {
    return BuildTargets
        .getGenPath(buildTarget, "%s")
        .resolve(getBinaryName(buildTarget) + "." + extension);
}
/** Returns the bundle's file extension (e.g. "app", "framework"). */
public String getExtension() {
    return extension;
}
@Override
public Path getPathToOutput() {
    return bundleRoot;
}
/** Returns the path of the processed Info.plist inside the bundle. */
public Path getInfoPlistPath() {
    return getMetadataPath().resolve("Info.plist");
}
/** Returns the binary's path relative to the bundle root. */
public Path getUnzippedOutputFilePathToBinary() {
    return this.binaryPath;
}
// Directory holding bundle metadata (Info.plist, PkgInfo), per destinations.
private Path getMetadataPath() {
    return bundleRoot.resolve(destinations.getMetadataPath());
}
public String getPlatformName() { return platformName; }
public Optional<BuildRule> getBinary() { return binary; }
/**
 * Assembles the bundle: processes Info.plist, copies/strips the binary,
 * copies resources (compiling plists and XIBs), embeds extension bundles and
 * asset catalogs, and code-signs when the platform requires it.
 */
@Override
public ImmutableList<Step> getBuildSteps(
    BuildContext context,
    BuildableContext buildableContext) {
  ImmutableList.Builder<Step> stepsBuilder = ImmutableList.builder();

  Path metadataPath = getMetadataPath();
  Path infoPlistInputPath = getResolver().getPath(infoPlist);
  Path infoPlistSubstitutionTempPath =
      BuildTargets.getScratchPath(getBuildTarget(), "%s.plist");
  Path infoPlistOutputPath = metadataPath.resolve("Info.plist");

  // Start from a clean bundle dir, then expand variables in Info.plist and
  // convert it to binary plist form.
  stepsBuilder.add(
      new MakeCleanDirectoryStep(getProjectFilesystem(), bundleRoot),
      new MkdirStep(getProjectFilesystem(), metadataPath),
      // TODO(user): This is only appropriate for .app bundles.
      new WriteFileStep(
          getProjectFilesystem(),
          "APPLWRUN",
          metadataPath.resolve("PkgInfo"),
          /* executable */ false),
      new FindAndReplaceStep(
          getProjectFilesystem(),
          infoPlistInputPath,
          infoPlistSubstitutionTempPath,
          InfoPlistSubstitution.createVariableExpansionFunction(
              withDefaults(
                  infoPlistSubstitutions,
                  ImmutableMap.of(
                      "EXECUTABLE_NAME", binaryName,
                      "PRODUCT_NAME", binaryName
                  ))
          )),
      new PlistProcessStep(
          getProjectFilesystem(),
          infoPlistSubstitutionTempPath,
          infoPlistOutputPath,
          getInfoPlistAdditionalKeys(platformName, sdkName),
          getInfoPlistOverrideKeys(platformName),
          PlistProcessStep.OutputFormat.BINARY));

  if (hasBinary) {
    stepsBuilder.add(
        new MkdirStep(
            getProjectFilesystem(),
            bundleRoot.resolve(this.destinations.getExecutablesPath())));
    // Fixed: a local variable here used to shadow the identical
    // bundleBinaryPath field computed in the constructor; use the field.
    stepsBuilder.add(
        CopyStep.forFile(
            getProjectFilesystem(),
            binary.get().getPathToOutput(),
            bundleBinaryPath));
    if (debugInfoFormat == DebugInfoFormat.DSYM) {
      buildableContext.recordArtifact(dsymPath);
      stepsBuilder.add(
          new DsymStep(
              getProjectFilesystem(),
              dsymutil.getCommandPrefix(getResolver()),
              bundleBinaryPath,
              dsymPath));
    }
    // Strip the debug map from the bundled binary (the .dSYM, if requested,
    // was generated from the unstripped binary above).
    stepsBuilder.add(
        new DefaultShellStep(
            getProjectFilesystem().getRootPath(),
            ImmutableList.<String>builder()
                .addAll(strip.getCommandPrefix(getResolver()))
                .add("-S")
                .add(getProjectFilesystem().resolve(bundleBinaryPath).toString())
                .build()));
  }

  Path bundleDestinationPath = bundleRoot.resolve(this.destinations.getResourcesPath());
  // Copy resource directories wholesale.
  for (SourcePath dir : resourceDirs) {
    stepsBuilder.add(new MkdirStep(getProjectFilesystem(), bundleDestinationPath));
    stepsBuilder.add(
        CopyStep.forDirectory(
            getProjectFilesystem(),
            getResolver().getPath(dir),
            bundleDestinationPath,
            CopyStep.DirectoryMode.DIRECTORY_AND_CONTENTS));
  }
  // Copy only the contents of these directories.
  for (SourcePath dir : dirsContainingResourceDirs) {
    stepsBuilder.add(new MkdirStep(getProjectFilesystem(), bundleDestinationPath));
    stepsBuilder.add(
        CopyStep.forDirectory(
            getProjectFilesystem(),
            getResolver().getPath(dir),
            bundleDestinationPath,
            CopyStep.DirectoryMode.CONTENTS_ONLY));
  }
  // Individual resource files may need processing (plist/xib compilation).
  for (SourcePath file : resourceFiles) {
    stepsBuilder.add(new MkdirStep(getProjectFilesystem(), bundleDestinationPath));
    Path resolvedFilePath = getResolver().getPath(file);
    Path destinationPath = bundleDestinationPath.resolve(resolvedFilePath.getFileName());
    addResourceProcessingSteps(resolvedFilePath, destinationPath, stepsBuilder);
  }
  addStepsToCopyExtensionBundlesDependencies(stepsBuilder);
  // Localized variants must live in <lang>.lproj directories inside Resources.
  if (resourceVariantFiles.isPresent()) {
    for (SourcePath variantSourcePath : resourceVariantFiles.get()) {
      Path variantFilePath = getResolver().getPath(variantSourcePath);
      Path variantDirectory = variantFilePath.getParent();
      if (variantDirectory == null || !variantDirectory.toString().endsWith(".lproj")) {
        throw new HumanReadableException(
            "Variant files have to be in a directory with name ending in '.lproj', " +
            "but '%s' is not.",
            variantFilePath);
      }
      Path bundleVariantDestinationPath =
          bundleDestinationPath.resolve(variantDirectory.getFileName());
      stepsBuilder.add(new MkdirStep(getProjectFilesystem(), bundleVariantDestinationPath));
      Path destinationPath = bundleVariantDestinationPath.resolve(variantFilePath.getFileName());
      addResourceProcessingSteps(variantFilePath, destinationPath, stepsBuilder);
    }
  }
  if (assetCatalog.isPresent()) {
    Path bundleDir = assetCatalog.get().getOutputDir();
    stepsBuilder.add(
        CopyStep.forDirectory(
            getProjectFilesystem(),
            bundleDir,
            bundleRoot,
            CopyStep.DirectoryMode.CONTENTS_ONLY));
  }

  // Copy the .mobileprovision file and code-sign if the platform requires it.
  if (provisioningProfiles.isPresent()) {
    Optional<Path> entitlementsPlist = Optional.absent();
    final String srcRoot = getProjectFilesystem().getRootPath().resolve(
        getBuildTarget().getBasePath()).toString();
    Optional<String> entitlementsPlistString =
        InfoPlistSubstitution.getVariableExpansionForPlatform(
            CODE_SIGN_ENTITLEMENTS,
            platformName,
            withDefaults(
                infoPlistSubstitutions,
                ImmutableMap.of(
                    "SOURCE_ROOT", srcRoot,
                    "SRCROOT", srcRoot
                )));
    if (entitlementsPlistString.isPresent()) {
      entitlementsPlist = Optional.of(Paths.get(entitlementsPlistString.get()));
    }
    final Path signingEntitlementsTempPath =
        BuildTargets.getScratchPath(getBuildTarget(), "%s.xcent");
    stepsBuilder.add(
        new ProvisioningProfileCopyStep(
            getProjectFilesystem(),
            infoPlistOutputPath,
            Optional.<String>absent(), // Provisioning profile UUID -- find automatically.
            entitlementsPlist,
            provisioningProfiles.get(),
            bundleDestinationPath.resolve("embedded.mobileprovision"),
            signingEntitlementsTempPath)
    );
    stepsBuilder.add(
        new CodeSignStep(
            getProjectFilesystem().getRootPath(),
            bundleDestinationPath,
            signingEntitlementsTempPath,
            codeSignIdentity.get().getHash()
        )
    );
  }

  // Ensure the bundle directory is archived so we can fetch it later.
  buildableContext.recordArtifact(getPathToOutput());
  return stepsBuilder.build();
}
// Registers the generated .dSYM with lldb after the build so that local
// debugging picks up symbols for the (stripped) bundled binary.
@Override
public ImmutableList<Step> getPostBuildSteps(
    BuildContext context,
    BuildableContext buildableContext) {
    // Nothing to register without a binary or without debug info.
    if (!hasBinary || debugInfoFormat == DebugInfoFormat.NONE) {
        return ImmutableList.of();
    }
    return ImmutableList.<Step>of(
        new Step() {
            @Override
            public int execute(ExecutionContext context) throws IOException, InterruptedException {
                ProcessExecutorParams params = ProcessExecutorParams
                    .builder()
                    .addCommand("lldb")
                    .build();
                // Drive lldb via stdin: create a target for the bundle and
                // attach the .dSYM symbols to it.
                return context.getProcessExecutor().launchAndExecute(
                    params,
                    ImmutableSet.<ProcessExecutor.Option>of(),
                    Optional.of(
                        String.format("target create %s\ntarget symbols add %s", bundleRoot, dsymPath)),
                    Optional.<Long>absent(),
                    Optional.<Function<Process, Void>>absent()).getExitCode();
            }
            @Override
            public String getShortName() {
                return "register debug symbols";
            }
            @Override
            public String getDescription(ExecutionContext context) {
                return String.format(
                    "register debug symbols for binary '%s': '%s'",
                    bundleRoot,
                    dsymPath);
            }
        });
}
/** Copies every dependent extension bundle into this bundle's PlugIns directory. */
public void addStepsToCopyExtensionBundlesDependencies(
    ImmutableList.Builder<Step> stepsBuilder) {
  // All extensions share one destination; computing it once is equivalent.
  Path plugInsDestPath = bundleRoot.resolve(destinations.getPlugInsPath());
  for (SourcePath extensionBundlePath : extensionBundlePaths) {
    stepsBuilder.add(new MkdirStep(getProjectFilesystem(), plugInsDestPath));
    stepsBuilder.add(
        CopyStep.forDirectory(
            getProjectFilesystem(),
            getResolver().getPath(extensionBundlePath),
            plugInsDestPath,
            CopyStep.DirectoryMode.DIRECTORY_AND_CONTENTS));
  }
}
/**
 * Returns {@code map} augmented with every entry of {@code defaults} whose key
 * is not already present; existing entries always win over defaults.
 */
static ImmutableMap<String, String> withDefaults(
    ImmutableMap<String, String> map,
    ImmutableMap<String, String> defaults) {
  ImmutableMap.Builder<String, String> builder = ImmutableMap.<String, String>builder()
      .putAll(map);
  for (ImmutableMap.Entry<String, String> entry : defaults.entrySet()) {
    if (!map.containsKey(entry.getKey())) {
      // Builder.put is fluent (returns this); no reassignment needed.
      builder.put(entry.getKey(), entry.getValue());
    }
  }
  return builder.build();
}
/**
 * Info.plist keys that are forced regardless of what the input plist says:
 * only non-OS X (i.e. iOS-family) bundles require iPhone OS.
 */
static ImmutableMap<String, NSObject> getInfoPlistOverrideKeys(
    String platformName) {
  ImmutableMap.Builder<String, NSObject> keys = ImmutableMap.builder();
  boolean isOsx = platformName.contains("osx");
  keys.put("LSRequiresIPhoneOS", new NSNumber(!isOsx));
  return keys.build();
}
/**
 * Info.plist keys added when absent from the input plist: Retina/GPU-switching
 * capabilities for OS X bundles, plus the platform and SDK stamps.
 */
static ImmutableMap<String, NSObject> getInfoPlistAdditionalKeys(
    String platformName,
    String sdkName) {
    ImmutableMap.Builder<String, NSObject> keys = ImmutableMap.builder();
    if (platformName.contains("osx")) {
        keys.put("NSHighResolutionCapable", new NSNumber(true));
        keys.put("NSSupportsAutomaticGraphicsSwitching", new NSNumber(true));
    }
    keys.put("DTPlatformName", new NSString(platformName));
    keys.put("DTSDKName", new NSString(sdkName));
    return keys.build();
}
/**
 * Adds the step that installs one resource file into the bundle, converting it
 * along the way when the format calls for it: plists/stringsdicts are
 * converted to binary plists, XIBs are compiled to NIBs, and everything else
 * is copied verbatim.
 */
private void addResourceProcessingSteps(
    Path sourcePath,
    Path destinationPath,
    ImmutableList.Builder<Step> stepsBuilder) {
  String sourcePathExtension = Files.getFileExtension(sourcePath.toString())
      .toLowerCase(Locale.US);
  switch (sourcePathExtension) {
    case "plist":
    case "stringsdict":
      LOG.debug("Converting plist %s to binary plist %s", sourcePath, destinationPath);
      stepsBuilder.add(
          new PlistProcessStep(
              getProjectFilesystem(),
              sourcePath,
              destinationPath,
              ImmutableMap.<String, NSObject>of(),
              ImmutableMap.<String, NSObject>of(),
              PlistProcessStep.OutputFormat.BINARY));
      break;
    case "xib":
      String compiledNibFilename = Files.getNameWithoutExtension(destinationPath.toString()) +
          ".nib";
      Path compiledNibPath = destinationPath.getParent().resolve(compiledNibFilename);
      // Fixed: the log previously printed destinationPath (the .xib's would-be
      // destination) as the NIB output; the NIB is written to compiledNibPath.
      LOG.debug("Compiling XIB %s to NIB %s", sourcePath, compiledNibPath);
      stepsBuilder.add(
          new IbtoolStep(
              getProjectFilesystem(),
              ibtool.getCommandPrefix(getResolver()),
              sourcePath,
              compiledNibPath));
      break;
    default:
      stepsBuilder.add(CopyStep.forFile(getProjectFilesystem(), sourcePath, destinationPath));
      break;
  }
}
/**
 * A bundle is tested by {@code testRule} when the rule is declared among its
 * tests, or when the wrapped binary (if testable) claims it.
 */
@Override
public boolean isTestedBy(BuildTarget testRule) {
  if (tests.contains(testRule)) {
    return true;
  }
  if (!binary.isPresent()) {
    return false;
  }
  BuildRule wrappedBinary = binary.get();
  return wrappedBinary instanceof NativeTestable
      && ((NativeTestable) wrappedBinary).isTestedBy(testRule);
}
// Delegates preprocessor input to the wrapped binary when it is testable;
// a bundle itself contributes no headers or flags.
@Override
public CxxPreprocessorInput getCxxPreprocessorInput(
    TargetGraph targetGraph,
    CxxPlatform cxxPlatform,
    HeaderVisibility headerVisibility) {
    if (binary.isPresent()) {
        BuildRule binaryRule = binary.get();
        if (binaryRule instanceof NativeTestable) {
            return ((NativeTestable) binaryRule).getCxxPreprocessorInput(
                targetGraph,
                cxxPlatform,
                headerVisibility);
        }
    }
    return CxxPreprocessorInput.EMPTY;
}
}
| |
/*
* Copyright 2009-2020 Aarhus University
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dk.brics.tajs.monitoring.inspector.datacollection.monitors;
import dk.brics.tajs.flowgraph.AbstractNode;
import dk.brics.tajs.flowgraph.BasicBlock;
import dk.brics.tajs.flowgraph.Function;
import dk.brics.tajs.flowgraph.SourceLocation;
import dk.brics.tajs.lattice.State;
import dk.brics.tajs.monitoring.DefaultAnalysisMonitoring;
import dk.brics.tajs.monitoring.inspector.datacollection.SourceLine;
import dk.brics.tajs.monitoring.inspector.util.OccurenceCountingMap;
import dk.brics.tajs.solver.NodeAndContext;
import dk.brics.tajs.util.Collectors;
import java.net.URL;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import static dk.brics.tajs.util.Collections.newMap;
import static dk.brics.tajs.util.Collections.newSet;
/**
 * Analysis monitor that records, per source line, how many nodes/blocks exist
 * and how often they are visited during the TAJS fixpoint computation.
 * The collected data is exported through {@link #createLineVisitingInfo()}.
 */
public class VisitationMonitoring extends DefaultAnalysisMonitoring {
    /**
     * Contains all nodes that are analyzed
     */
    private final Set<AbstractNode> visitedNodes;
    /**
     * A counter for how many nodes there are on each line
     */
    private final OccurenceCountingMap<SourceLine> nodesPerLineMap;
    /**
     * A counter for how many blocks there are on each line
     */
    private final OccurenceCountingMap<SourceLine> blocksPerLineMap;
    /**
     * A counter for how many times blocks on a line have been analyzed
     */
    private final OccurenceCountingMap<SourceLine> visitCountMapByBlocks;
    /**
     * A counter for how many times each node and context have been visited
     */
    private final Map<SourceLine, OccurenceCountingMap<NodeAndContext>> visitedNodesAndContexts;
    /**
     * Contains all the functions that have been analyzed
     */
    private final Set<Function> seenFunctions = newSet();

    public VisitationMonitoring() {
        this.visitCountMapByBlocks = new OccurenceCountingMap<>();
        this.visitedNodes = new HashSet<>();
        this.nodesPerLineMap = new OccurenceCountingMap<>();
        this.blocksPerLineMap = new OccurenceCountingMap<>();
        this.visitedNodesAndContexts = newMap();
    }

    /** Counts, for each line of {@code f}, how many basic blocks touch that line. */
    private static OccurenceCountingMap<SourceLine> makeBlocksPerLineMap(Function f) {
        OccurenceCountingMap<SourceLine> map = new OccurenceCountingMap<>();
        for (BasicBlock b : f.getBlocks()) {
            Set<Integer> lines = new HashSet<>();
            for (AbstractNode n : b.getNodes()) {
                if (!n.isArtificial()) {
                    final int lineNumber = n.getSourceLocation().getLineNumber();
                    if (!lines.contains(lineNumber)) {
                        // count each line in each block exactly once, regardless of how many nodes each block has
                        final SourceLine lineSourceLocation = makeWithUnspecifiedColumn(n.getSourceLocation());
                        if (lineSourceLocation != null) {
                            map.count(lineSourceLocation);
                        }
                        lines.add(lineNumber);
                    }
                }
            }
        }
        return map;
    }

    /** Counts, for each line of {@code f}, how many flow-graph nodes sit on that line. */
    private static OccurenceCountingMap<SourceLine> makeNodesPerLineMap(Function f) {
        OccurenceCountingMap<SourceLine> map = new OccurenceCountingMap<>();
        for (BasicBlock b : f.getBlocks()) {
            for (AbstractNode n : b.getNodes()) {
                final SourceLine lineSourceLocation = makeWithUnspecifiedColumn(n.getSourceLocation());
                if (lineSourceLocation != null) {
                    map.count(lineSourceLocation);
                }
            }
        }
        return map;
    }

    /**
     * Collapses a source location to its line (column dropped).
     * Returns null when the location has no associated URL.
     */
    private static SourceLine makeWithUnspecifiedColumn(SourceLocation sourceLocation) {
        URL location = sourceLocation.getLocation();
        if (location != null) {
            return new SourceLine(location, sourceLocation.getLineNumber());
        }
        return null;
    }

    /** Exports a snapshot view of the collected per-line visitation data. */
    public Info createLineVisitingInfo() {
        return new Info(visitCountMapByBlocks, visitedNodes, blocksPerLineMap, nodesPerLineMap, visitedNodesAndContexts);
    }

    /** Registers the static line/block counts of {@code f} on first encounter. */
    private void addFunction(Function f) {
        // Set.add reports whether the element was new, replacing the previous
        // contains-then-add (check-then-act) pattern with a single lookup.
        if (!seenFunctions.add(f)) {
            return;
        }
        nodesPerLineMap.countAll(makeNodesPerLineMap(f));
        blocksPerLineMap.countAll(makeBlocksPerLineMap(f));
    }

    @Override
    public void visitNodeTransferPre(AbstractNode n, State state) {
        visitedNodes.add(n);
    }

    @Override
    public void visitBlockTransferPost(BasicBlock block, State state) {
        addFunction(block.getFunction());
        Set<SourceLine> lines = new HashSet<>();
        for (AbstractNode node : block.getNodes()) {
            if (node.getSourceLocation().getLocation() == null)
                continue; // node js artificial nodes doesn't have a corresponding file location, ReportMaker reads it.
            final SourceLocation sourceLocation = node.getSourceLocation();
            final SourceLine lineSourceLocation = makeWithUnspecifiedColumn(sourceLocation);
            if (!lines.contains(lineSourceLocation)) { // only count each line once per block transfer
                visitCountMapByBlocks.count(lineSourceLocation);
                lines.add(lineSourceLocation);
            }
            if (!visitedNodesAndContexts.containsKey(lineSourceLocation)) {
                visitedNodesAndContexts.put(lineSourceLocation, new OccurenceCountingMap<>());
            }
            visitedNodesAndContexts.get(lineSourceLocation).count(new NodeAndContext<>(node, state.getContext()));
        }
    }

    /** Immutable-by-convention view over the monitor's collected maps. */
    public static class Info {
        private final Set<AbstractNode> visitedNodes;
        private final OccurenceCountingMap<SourceLine> blockVisitCountsPerLine;
        private final OccurenceCountingMap<SourceLine> blocksPerLine;
        private final OccurenceCountingMap<SourceLine> nodesPerLine;
        private final Map<SourceLine, OccurenceCountingMap<NodeAndContext>> visitedNodesAndContexts;

        public Info(OccurenceCountingMap<SourceLine> blockVisitCountsPerLine, Set<AbstractNode> visitedNodes, OccurenceCountingMap<SourceLine> blocksPerLine, OccurenceCountingMap<SourceLine> nodesPerLine, Map<SourceLine, OccurenceCountingMap<NodeAndContext>> visitedNodesAndContexts) {
            this.blockVisitCountsPerLine = blockVisitCountsPerLine;
            this.visitedNodes = visitedNodes;
            this.blocksPerLine = blocksPerLine;
            this.nodesPerLine = nodesPerLine;
            this.visitedNodesAndContexts = visitedNodesAndContexts;
        }

        public OccurenceCountingMap<SourceLine> getBlocksPerLine() {
            return blocksPerLine;
        }

        public OccurenceCountingMap<SourceLine> getNodesPerLine() {
            return nodesPerLine;
        }

        public OccurenceCountingMap<SourceLine> getBlockVisitCountsPerLine() {
            return blockVisitCountsPerLine;
        }

        /** Lines that the analysis considers reachable, grouped by source file URL. */
        public Map<URL, Set<Integer>> getAbstractLiveLines() {
            return convertNodeSetToURLLineSetMap(visitedNodes);
        }

        private Map<URL, Set<Integer>> convertNodeSetToURLLineSetMap(Set<AbstractNode> nodes) {
            return nodes.stream()
                    .filter(n -> n.getSourceLocation().getLocation() != null)
                    .collect(Collectors.groupingBy(n -> n.getSourceLocation().getLocation(), java.util.stream.Collectors.mapping(n -> n.getSourceLocation().getLineNumber(), Collectors.toSet())));
        }

        public Map<SourceLine, OccurenceCountingMap<NodeAndContext>> getVisitedNodesAndContexts() {
            return visitedNodesAndContexts;
        }
    }
}
| |
package com.dianping.cat.message.codec;
import io.netty.buffer.ByteBuf;
import java.io.UnsupportedEncodingException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.TimeZone;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
import org.unidal.lookup.annotation.Inject;
import com.dianping.cat.message.Event;
import com.dianping.cat.message.Heartbeat;
import com.dianping.cat.message.Message;
import com.dianping.cat.message.Metric;
import com.dianping.cat.message.Trace;
import com.dianping.cat.message.Transaction;
import com.dianping.cat.message.spi.MessageCodec;
import com.dianping.cat.message.spi.MessageTree;
import com.dianping.cat.message.spi.codec.BufferWriter;
/**
* Local use only, do not use it over network since it only supports one-way encoding
*/
public class HtmlMessageCodec implements MessageCodec, Initializable {
public static final String ID = "html";
private static final String VERSION = "HT2"; // HTML version 2 since Mar 20, 2013
@Inject
private BufferWriter m_writer;
// URL prefix for logview links rendered in the HTML output.
@Inject
private String m_logViewPrefix = "/cat/r/m/";
// NOTE(review): m_bufferHelper is not initialized here — presumably assigned in
// initialize() (out of view); confirm before relying on it elsewhere.
private BufferHelper m_bufferHelper;
// Formats message timestamps for display.
private DateHelper m_dateHelper = new DateHelper();
@Override
public MessageTree decode(ByteBuf buf) {
    // This codec renders HTML for local viewing only; decoding is
    // intentionally unsupported (see class javadoc).
    throw new UnsupportedOperationException("HtmlMessageCodec only supports one-way encoding!");
}
@Override
public void decode(ByteBuf buf, MessageTree tree) {
    // Intentionally unsupported: one-way (encode-only) codec.
    throw new UnsupportedOperationException("HtmlMessageCodec only supports one-way encoding!");
}
// Encodes the whole message tree as an HTML table into buf. The first int of
// the output is the total byte length of the HTML that follows; it is
// back-patched once the content has been written.
@Override
public void encode(MessageTree tree, ByteBuf buf) {
    int count = 0;
    int index = buf.writerIndex();
    BufferHelper helper = m_bufferHelper;
    buf.writeInt(0); // place-holder
    count += helper.table1(buf);
    count += helper.crlf(buf);
    count += encodeHeader(tree, buf);
    if (tree.getMessage() != null) {
        count += encodeMessage(tree, tree.getMessage(), buf, 0, new LineCounter());
    }
    count += helper.table2(buf);
    // Back-patch the length placeholder written above.
    buf.setInt(index, count);
}
// Hook for subclasses; the HTML format has no footer, so nothing is written
// and zero bytes are reported.
protected int encodeFooter(MessageTree tree, ByteBuf buf) {
    return 0;
}
/**
 * Writes the header row of the HTML table: optional root/parent logview links
 * followed by the tree's identification fields. Returns the number of bytes
 * written.
 */
protected int encodeHeader(MessageTree tree, ByteBuf buf) {
    BufferHelper helper = m_bufferHelper;
    StringBuilder sb = new StringBuilder(1024);
    String parentMessageId = tree.getParentMessageId();
    String rootMessageId = tree.getRootMessageId();
    String domain = tree.getDomain();

    sb.append("<tr class=\"header\"><td colspan=5>");
    if (rootMessageId != null && !rootMessageId.equalsIgnoreCase("null")) {
        sb.append(String.format("<a href='%s?domain=%s'>RootLogview</a> ", rootMessageId, domain));
        // Fixed: parentMessageId may be null even when a root id is set, which
        // previously caused a NullPointerException here. Only render the parent
        // link when a parent id exists and differs from the root.
        if (parentMessageId != null && !parentMessageId.equals(rootMessageId)) {
            sb.append(String.format("<a href='%s?domain=%s'>ParentLogview</a> ", parentMessageId, domain));
        }
    }
    sb.append(VERSION).append(" ").append(tree.getDomain()).append(" ");
    sb.append(tree.getHostName()).append(" ").append(tree.getIpAddress()).append(" ");
    sb.append(tree.getThreadGroupName()).append(" ").append(tree.getThreadId()).append(" ");
    sb.append(tree.getThreadName()).append(" ").append(tree.getMessageId()).append(" ");
    sb.append(parentMessageId).append(" ").append(rootMessageId).append(" ");
    sb.append(tree.getSessionToken()).append(" ");
    sb.append("</td></tr>");
    return helper.write(buf, sb.toString());
}
/**
 * Renders a single message as one HTML table row: type letter + timestamp,
 * message type, name, status and data cells. Returns bytes written.
 * The counter (when present) alternates odd/even row styling.
 */
protected int encodeLine(MessageTree tree, Message message, ByteBuf buf, char type, Policy policy, int level,
LineCounter counter) {
BufferHelper helper = m_bufferHelper;
int count = 0;
if (counter != null) {
counter.inc();
count += helper.tr1(buf, counter.getCount() % 2 != 0 ? "odd" : "even");
} else {
count += helper.tr1(buf, null);
}
count += helper.td1(buf);
count += helper.nbsp(buf, level * 2); // 2 spaces per level
count += helper.write(buf, (byte) type);
// 'T' marks a transaction end row, so show its end time rather than start time.
if (type == 'T' && message instanceof Transaction) {
long duration = ((Transaction) message).getDurationInMillis();
count += helper.write(buf, m_dateHelper.format(message.getTimestamp() + duration));
} else {
count += helper.write(buf, m_dateHelper.format(message.getTimestamp()));
}
count += helper.td2(buf);
count += helper.td(buf, message.getType());
count += helper.td(buf, message.getName());
if (policy != Policy.WITHOUT_STATUS) {
if (Message.SUCCESS.equals(message.getStatus())) {
count += helper.td(buf, " "); // do not output "0"
} else {
count += helper.td(buf, message.getStatus(), "class=\"error\"");
}
Object data = message.getData();
count += helper.td1(buf);
if (policy == Policy.WITH_DURATION && message instanceof Transaction) {
long durationInMicro = ((Transaction) message).getDurationInMicros();
long durationInMillis = durationInMicro / 1000L;
// NOTE(review): the <100us and <10ms branches are currently identical
// (both print millis with 2 decimals) — confirm whether a distinct
// sub-100us format was intended.
if (durationInMicro < 100L) {
count += helper.write(buf, String.format("%.2f", durationInMicro / 1000.0));
} else if (durationInMicro < 10000L) { // less than 10 ms
count += helper.write(buf, String.format("%.2f", durationInMicro / 1000.0));
} else { // no fraction
count += helper.write(buf, Long.toString(durationInMillis));
}
count += helper.write(buf, "ms ");
}
count += helper.writeRaw(buf, String.valueOf(data));
count += helper.td2(buf);
} else {
// WITHOUT_STATUS: keep the column count stable with two empty cells.
count += helper.td(buf, "");
count += helper.td(buf, "");
}
count += helper.tr2(buf);
count += helper.crlf(buf);
return count;
}
/**
 * Renders one row per link parsed from the message data ("id=title&..." form),
 * each as an expandable logview anchor plus an empty div the viewer fills in.
 * Returns bytes written.
 */
protected int encodeLogViewLink(MessageTree tree, Message message, ByteBuf buf, int level, LineCounter counter) {
BufferHelper helper = m_bufferHelper;
Map<String, String> links = parseLinks(message.getData().toString());
int count = 0;
for (Map.Entry<String, String> e : links.entrySet()) {
String link = e.getKey();
String title = e.getValue();
// Fall back to a generic label when no title was supplied.
if (title.length() == 0) {
title = "show";
}
// Counter is advanced for row parity bookkeeping, but link rows always
// use the fixed "link" style class.
if (counter != null) {
counter.inc();
count += helper.tr1(buf, "link");
} else {
count += helper.tr1(buf, null);
}
count += helper.td1(buf);
count += helper.nbsp(buf, level * 2); // 2 spaces per level
count += helper.write(buf,
String.format("<a href=\"%s%s\" onclick=\"return show(this,'%s');\">[:: %s ::]</a>", //
m_logViewPrefix, link, link, title));
count += helper.td2(buf);
count += helper.td(buf, "<div id=\"" + link + "\"></div>", "colspan=\"4\"");
count += helper.tr2(buf);
count += helper.crlf(buf);
}
return count;
}
/**
 * Recursively renders a message subtree. Transactions with children emit a
 * 't' start row, their children at level+1, then a 'T' end row with duration;
 * leaf transactions emit a single row ('t' if still running, 'A' otherwise).
 * Events, traces, metrics and heartbeats map to 'E'/'L'/'M'/'H' rows.
 * Returns bytes written; throws for unknown message types.
 */
protected int encodeMessage(MessageTree tree, Message message, ByteBuf buf, int level, LineCounter counter) {
if (message instanceof Transaction) {
Transaction transaction = (Transaction) message;
List<Message> children = transaction.getChildren();
if (children.isEmpty()) {
// Negative duration means the transaction has not completed.
if (transaction.getDurationInMillis() < 0) {
return encodeLine(tree, transaction, buf, 't', Policy.WITHOUT_STATUS, level, counter);
} else {
return encodeLine(tree, transaction, buf, 'A', Policy.WITH_DURATION, level, counter);
}
} else {
int count = 0;
count += encodeLine(tree, transaction, buf, 't', Policy.WITHOUT_STATUS, level, counter);
for (Message child : children) {
count += encodeMessage(tree, child, buf, level + 1, counter);
}
count += encodeLine(tree, transaction, buf, 'T', Policy.WITH_DURATION, level, counter);
return count;
}
} else if (message instanceof Event) {
String type = message.getType();
// Remote call/link events become expandable cross-logview anchors.
if ("RemoteCall".equals(type)) {
return encodeLogViewLink(tree, message, buf, level, counter);
} else if ("RemoteLink".equals(type)) {
return encodeRemoteLink(tree, message, buf, level, counter);
} else {
return encodeLine(tree, message, buf, 'E', Policy.DEFAULT, level, counter);
}
} else if (message instanceof Trace) {
return encodeLine(tree, message, buf, 'L', Policy.DEFAULT, level, counter);
} else if (message instanceof Metric) {
return encodeLine(tree, message, buf, 'M', Policy.DEFAULT, level, counter);
} else if (message instanceof Heartbeat) {
return encodeLine(tree, message, buf, 'H', Policy.DEFAULT, level, counter);
} else {
throw new RuntimeException(String.format("Unsupported message type: %s.", message.getClass()));
}
}
/**
 * Renders a single "RemoteLink" event row: the message data is the raw link
 * id and the message name is used as the anchor title. Returns bytes written.
 */
protected int encodeRemoteLink(MessageTree tree, Message message, ByteBuf buf, int level, LineCounter counter) {
BufferHelper helper = m_bufferHelper;
int count = 0;
if (counter != null) {
counter.inc();
count += helper.tr1(buf, "link");
} else {
count += helper.tr1(buf, null);
}
String link = message.getData().toString();
String name = message.getName();
count += helper.td1(buf);
count += helper.nbsp(buf, level * 2); // 2 spaces per level
count += helper.write(buf,
String.format("<a href=\"%s%s\" onclick=\"return show(this,'%s');\">[:: %s ::]</a>", //
m_logViewPrefix, link, link, name));
count += helper.td2(buf);
count += helper.td(buf, "<div id=\"" + link + "\"></div>", "colspan=\"4\"");
count += helper.tr2(buf);
count += helper.crlf(buf);
return count;
}
@Override
public void initialize() throws InitializationException {
    // Wire the HTML-fragment helper to the injected buffer writer.
    m_bufferHelper = new BufferHelper(m_writer);
}
/**
 * Parses a query-string-like payload ("id=title&id2=title2&...") into an
 * ordered map of link id to title. Only the first '=' in each segment acts
 * as a separator; later '=' characters belong to the value. A trailing
 * segment with an empty name is dropped.
 */
protected Map<String, String> parseLinks(String str) {
    Map<String, String> links = new LinkedHashMap<String, String>();
    String[] segments = str.split("&", -1);
    int last = segments.length - 1;

    for (int i = 0; i <= last; i++) {
        String segment = segments[i];
        int sep = segment.indexOf('=');
        String key;
        String title;

        if (sep < 0) {
            key = segment;
            title = "";
        } else {
            key = segment.substring(0, sep);
            title = segment.substring(sep + 1);
        }

        // Mirror the original char-scanner: every '&' forces an entry, but the
        // final segment is kept only when it has a non-empty name.
        if (i < last || key.length() > 0) {
            links.put(key, title);
        }
    }
    return links;
}
/**
 * Injects the buffer writer and rebuilds the helper around it.
 */
public void setBufferWriter(BufferWriter writer) {
    m_bufferHelper = new BufferHelper(writer);
    m_writer = writer;
}
/**
 * Sets the URL prefix prepended to every generated logview link.
 */
public void setLogViewPrefix(String logViewPrefix) {
    m_logViewPrefix = logViewPrefix;
}
/**
 * Low-level writer of HTML fragments into a ByteBuf. Every method returns the
 * number of bytes it wrote so callers can accumulate the total payload length.
 */
protected static class BufferHelper {
    // FIX: shared fragment constants are now final so they cannot be reassigned.
    private static final byte[] TABLE1 = "<table class=\"logview\">".getBytes();
    private static final byte[] TABLE2 = "</table>".getBytes();
    private static final byte[] TR1 = "<tr>".getBytes();
    private static final byte[] TR2 = "</tr>".getBytes();
    private static final byte[] TD1 = "<td>".getBytes();
    private static final byte[] TD2 = "</td>".getBytes();
    private static final byte[] NBSP = " ".getBytes();
    private static final byte[] CRLF = "\r\n".getBytes();

    // NOTE(review): getBytes() without a charset uses the platform default;
    // consider UTF-8 explicitly if output must be platform-independent.

    private final BufferWriter m_writer;

    public BufferHelper(BufferWriter writer) {
        m_writer = writer;
    }

    /** Writes "\r\n". */
    public int crlf(ByteBuf buf) {
        buf.writeBytes(CRLF);
        return CRLF.length;
    }

    /** Writes {@code count} non-breaking-space entities for indentation. */
    public int nbsp(ByteBuf buf, int count) {
        for (int i = 0; i < count; i++) {
            buf.writeBytes(NBSP);
        }
        return count * NBSP.length;
    }

    /** Opens the logview table. */
    public int table1(ByteBuf buf) {
        buf.writeBytes(TABLE1);
        return TABLE1.length;
    }

    /** Closes the logview table. */
    public int table2(ByteBuf buf) {
        buf.writeBytes(TABLE2);
        return TABLE2.length;
    }

    /** Writes a complete attribute-less table cell around {@code str}. */
    public int td(ByteBuf buf, String str) {
        return td(buf, str, null);
    }

    /** Writes a complete table cell, optionally with raw attribute text. */
    public int td(ByteBuf buf, String str, String attributes) {
        if (str == null) {
            str = "null";
        }
        byte[] data = str.getBytes();
        int count = 0;
        if (attributes == null) {
            buf.writeBytes(TD1);
            count += TD1.length;
        } else {
            String tag = "<td " + attributes + ">";
            byte[] bytes = tag.getBytes();
            buf.writeBytes(bytes);
            count += bytes.length;
        }
        buf.writeBytes(data);
        count += data.length;
        buf.writeBytes(TD2);
        count += TD2.length;
        return count;
    }

    /** Writes an opening {@code <td>} tag. */
    public int td1(ByteBuf buf) {
        buf.writeBytes(TD1);
        return TD1.length;
    }

    /** Writes an opening {@code <td>} tag with optional attributes. */
    public int td1(ByteBuf buf, String attributes) {
        if (attributes == null) {
            buf.writeBytes(TD1);
            return TD1.length;
        } else {
            String tag = "<td " + attributes + ">";
            byte[] bytes = tag.getBytes();
            buf.writeBytes(bytes);
            return bytes.length;
        }
    }

    /** Writes a closing {@code </td>} tag. */
    public int td2(ByteBuf buf) {
        buf.writeBytes(TD2);
        return TD2.length;
    }

    /** Writes an opening {@code <tr>} tag, optionally with a CSS class. */
    public int tr1(ByteBuf buf, String styleClass) {
        if (styleClass == null) {
            buf.writeBytes(TR1);
            return TR1.length;
        } else {
            String tag = "<tr class=\"" + styleClass + "\">";
            byte[] bytes = tag.getBytes();
            buf.writeBytes(bytes);
            return bytes.length;
        }
    }

    /** Writes a closing {@code </tr>} tag. */
    public int tr2(ByteBuf buf) {
        buf.writeBytes(TR2);
        return TR2.length;
    }

    /** Writes a single byte. */
    public int write(ByteBuf buf, byte b) {
        buf.writeByte(b);
        return 1;
    }

    /** Writes {@code str} as-is ("null" if null). */
    public int write(ByteBuf buf, String str) {
        if (str == null) {
            str = "null";
        }
        byte[] data = str.getBytes();
        buf.writeBytes(data);
        return data.length;
    }

    /**
     * Writes {@code str} through the BufferWriter (which may escape/transform),
     * encoding as UTF-8 when available and the platform default otherwise.
     */
    public int writeRaw(ByteBuf buf, String str) {
        if (str == null) {
            str = "null";
        }
        byte[] data;
        try {
            data = str.getBytes("utf-8");
        } catch (UnsupportedEncodingException e) {
            // UTF-8 is mandated by the JLS, so this path is effectively dead.
            data = str.getBytes();
        }
        return m_writer.writeTo(buf, data, true);
    }
}
/**
 * Thread-safe date helper. SimpleDateFormat is NOT thread safe, so instances
 * are pooled in a bounded queue (capacity 20) instead of being shared; extra
 * instances beyond the pool size are created on demand and discarded.
 * All formatting/parsing uses pattern HH:mm:ss.SSS in the GMT+8 time zone.
 */
protected static class DateHelper {
private static final String DATE_PATTERN = "HH:mm:ss.SSS";
private BlockingQueue<SimpleDateFormat> m_queue = new ArrayBlockingQueue<SimpleDateFormat>(20);
// Formats a millisecond timestamp; borrows a formatter from the pool and
// returns it afterwards when there is still room.
public String format(long timestamp) {
SimpleDateFormat format = m_queue.poll();
if (format == null) {
format = new SimpleDateFormat(DATE_PATTERN);
format.setTimeZone(TimeZone.getTimeZone("GMT+8"));
}
try {
return format.format(new Date(timestamp));
} finally {
if (m_queue.remainingCapacity() > 0) {
m_queue.offer(format);
}
}
}
// Parses a formatted time back to epoch millis; returns -1 on parse failure.
public long parse(String str) {
SimpleDateFormat format = m_queue.poll();
if (format == null) {
format = new SimpleDateFormat(DATE_PATTERN);
format.setTimeZone(TimeZone.getTimeZone("GMT+8"));
}
try {
return format.parse(str).getTime();
} catch (ParseException e) {
return -1;
} finally {
if (m_queue.remainingCapacity() > 0) {
m_queue.offer(format);
}
}
}
}
/**
 * Mutable row counter used to alternate odd/even row styling while encoding.
 */
protected static class LineCounter {
    private int m_rows;

    /** Number of rows counted so far. */
    public int getCount() {
        return m_rows;
    }

    /** Advances the counter by one row. */
    public void inc() {
        m_rows += 1;
    }
}
/**
 * Controls which optional cells encodeLine emits: DEFAULT shows status and
 * data, WITHOUT_STATUS suppresses both, WITH_DURATION additionally prefixes
 * the data cell with the transaction duration.
 */
protected static enum Policy {
    DEFAULT,
    WITHOUT_STATUS,
    WITH_DURATION;

    /** Maps a row-type letter ('t', 'T', 'A', ...) to its rendering policy. */
    public static Policy getByMessageIdentifier(byte identifier) {
        if (identifier == 't') {
            return WITHOUT_STATUS;
        }
        if (identifier == 'T' || identifier == 'A') {
            return WITH_DURATION;
        }
        // 'E', 'H' and anything unrecognized fall back to the default policy.
        return DEFAULT;
    }
}
}
| |
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.query.topn;
import com.fasterxml.jackson.core.type.TypeReference;
import com.google.common.base.Function;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Ordering;
import com.google.common.primitives.Ints;
import com.google.inject.Inject;
import com.metamx.emitter.service.ServiceMetricEvent;
import io.druid.granularity.QueryGranularity;
import io.druid.java.util.common.ISE;
import io.druid.java.util.common.guava.Sequence;
import io.druid.java.util.common.guava.Sequences;
import io.druid.java.util.common.guava.nary.BinaryFn;
import io.druid.query.BaseQuery;
import io.druid.query.BySegmentResultValue;
import io.druid.query.CacheStrategy;
import io.druid.query.DruidMetrics;
import io.druid.query.IntervalChunkingQueryRunnerDecorator;
import io.druid.query.Query;
import io.druid.query.QueryCacheHelper;
import io.druid.query.QueryRunner;
import io.druid.query.QueryToolChest;
import io.druid.query.Result;
import io.druid.query.ResultGranularTimestampComparator;
import io.druid.query.ResultMergeQueryRunner;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.AggregatorUtil;
import io.druid.query.aggregation.MetricManipulationFn;
import io.druid.query.aggregation.PostAggregator;
import io.druid.query.dimension.DefaultDimensionSpec;
import io.druid.query.dimension.DimensionSpec;
import io.druid.query.filter.DimFilter;
import org.joda.time.DateTime;
import javax.annotation.Nullable;
import java.nio.ByteBuffer;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
/**
*/
// Tool chest for TopN queries: result merging, metric emission, per-row
// manipulation before/after merge, cache (de)serialization, and pre/post
// merge query decoration (filter optimization, extraction-fn deferral,
// minimum-threshold adjustment).
public class TopNQueryQueryToolChest extends QueryToolChest<Result<TopNResultValue>, TopNQuery>
{
// Cache key prefix byte identifying TopN queries in the query cache.
private static final byte TOPN_QUERY = 0x1;
// Jackson type tokens for (de)serializing results and cached values.
private static final TypeReference<Result<TopNResultValue>> TYPE_REFERENCE = new TypeReference<Result<TopNResultValue>>()
{
};
private static final TypeReference<Object> OBJECT_TYPE_REFERENCE = new TypeReference<Object>()
{
};
private final TopNQueryConfig config;
private final IntervalChunkingQueryRunnerDecorator intervalChunkingQueryRunnerDecorator;
@Inject
public TopNQueryQueryToolChest(
TopNQueryConfig config,
IntervalChunkingQueryRunnerDecorator intervalChunkingQueryRunnerDecorator
)
{
this.config = config;
this.intervalChunkingQueryRunnerDecorator = intervalChunkingQueryRunnerDecorator;
}
// Returns the output names of the given aggregator factories, in order.
protected static String[] extractFactoryName(final List<AggregatorFactory> aggregatorFactories)
{
return Lists.transform(
aggregatorFactories, new Function<AggregatorFactory, String>()
{
@Nullable
@Override
public String apply(@Nullable AggregatorFactory input)
{
return input.getName();
}
}
).toArray(new String[0]);
}
// Drops post-aggregators that are not needed to compute the TopN metric.
private static List<PostAggregator> prunePostAggregators(TopNQuery query)
{
return AggregatorUtil.pruneDependentPostAgg(
query.getPostAggregatorSpecs(),
query.getTopNMetricSpec().getMetricName(query.getDimensionSpec())
);
}
@Override
public QueryRunner<Result<TopNResultValue>> mergeResults(
QueryRunner<Result<TopNResultValue>> runner
)
{
// Merge partial results ordered by granular timestamp, combining rows for
// the same bucket with TopNBinaryFn.
return new ResultMergeQueryRunner<Result<TopNResultValue>>(runner)
{
@Override
protected Ordering<Result<TopNResultValue>> makeOrdering(Query<Result<TopNResultValue>> query)
{
return ResultGranularTimestampComparator.create(
((TopNQuery) query).getGranularity(), query.isDescending()
);
}
@Override
protected BinaryFn<Result<TopNResultValue>, Result<TopNResultValue>, Result<TopNResultValue>> createMergeFn(
Query<Result<TopNResultValue>> input
)
{
TopNQuery query = (TopNQuery) input;
return new TopNBinaryFn(
TopNResultMerger.identity,
query.getGranularity(),
query.getDimensionSpec(),
query.getTopNMetricSpec(),
query.getThreshold(),
query.getAggregatorSpecs(),
query.getPostAggregatorSpecs()
);
}
};
}
@Override
public ServiceMetricEvent.Builder makeMetricBuilder(TopNQuery query)
{
// Emits query-shape dimensions (threshold, dimension, metric counts).
return DruidMetrics.makePartialQueryTimeMetric(query)
.setDimension(
"threshold",
String.valueOf(query.getThreshold())
)
.setDimension("dimension", query.getDimensionSpec().getDimension())
.setDimension(
"numMetrics",
String.valueOf(query.getAggregatorSpecs().size())
)
.setDimension(
"numComplexMetrics",
String.valueOf(DruidMetrics.findNumComplexAggs(query.getAggregatorSpecs()))
);
}
@Override
public Function<Result<TopNResultValue>, Result<TopNResultValue>> makePreComputeManipulatorFn(
final TopNQuery query, final MetricManipulationFn fn
)
{
// Pre-merge manipulation: applies fn to each aggregator value first, then
// fills in only the pruned (metric-relevant) post-aggregators when absent.
return new Function<Result<TopNResultValue>, Result<TopNResultValue>>()
{
private String dimension = query.getDimensionSpec().getOutputName();
private final List<PostAggregator> prunedAggs = prunePostAggregators(query);
private final AggregatorFactory[] aggregatorFactories = query.getAggregatorSpecs()
.toArray(new AggregatorFactory[0]);
private final String[] aggFactoryNames = extractFactoryName(query.getAggregatorSpecs());
@Override
public Result<TopNResultValue> apply(Result<TopNResultValue> result)
{
List<Map<String, Object>> serializedValues = Lists.newArrayList(
Iterables.transform(
result.getValue(),
new Function<DimensionAndMetricValueExtractor, Map<String, Object>>()
{
@Override
public Map<String, Object> apply(DimensionAndMetricValueExtractor input)
{
final Map<String, Object> values = Maps.newHashMapWithExpectedSize(
aggregatorFactories.length
+ prunedAggs.size()
+ 1
);
for (int i = 0; i < aggregatorFactories.length; ++i) {
final String aggName = aggFactoryNames[i];
values.put(aggName, fn.manipulate(aggregatorFactories[i], input.getMetric(aggName)));
}
for (PostAggregator postAgg : prunedAggs) {
final String name = postAgg.getName();
Object calculatedPostAgg = input.getMetric(name);
if (calculatedPostAgg != null) {
values.put(name, calculatedPostAgg);
} else {
values.put(name, postAgg.compute(values));
}
}
values.put(dimension, input.getDimensionValue(dimension));
return values;
}
}
)
);
return new Result<TopNResultValue>(
result.getTimestamp(),
new TopNResultValue(serializedValues)
);
}
};
}
@Override
public Function<Result<TopNResultValue>, Result<TopNResultValue>> makePostComputeManipulatorFn(
final TopNQuery query, final MetricManipulationFn fn
)
{
// Post-merge manipulation: order matters here — raw aggregator values go
// in first, post-aggs are computed from them, and only then is fn applied
// to the aggregator values (so post-aggs see un-manipulated inputs).
return new Function<Result<TopNResultValue>, Result<TopNResultValue>>()
{
private String dimension = query.getDimensionSpec().getOutputName();
private final AggregatorFactory[] aggregatorFactories = query.getAggregatorSpecs()
.toArray(new AggregatorFactory[0]);
private final String[] aggFactoryNames = extractFactoryName(query.getAggregatorSpecs());
private final PostAggregator[] postAggregators = query.getPostAggregatorSpecs().toArray(new PostAggregator[0]);
@Override
public Result<TopNResultValue> apply(Result<TopNResultValue> result)
{
List<Map<String, Object>> serializedValues = Lists.newArrayList(
Iterables.transform(
result.getValue(),
new Function<DimensionAndMetricValueExtractor, Map<String, Object>>()
{
@Override
public Map<String, Object> apply(DimensionAndMetricValueExtractor input)
{
final Map<String, Object> values = Maps.newHashMapWithExpectedSize(
aggregatorFactories.length
+ query.getPostAggregatorSpecs().size()
+ 1
);
for (int i = 0; i < aggFactoryNames.length; ++i) {
final String name = aggFactoryNames[i];
values.put(name, input.getMetric(name));
}
for (PostAggregator postAgg : postAggregators) {
Object calculatedPostAgg = input.getMetric(postAgg.getName());
if (calculatedPostAgg != null) {
values.put(postAgg.getName(), calculatedPostAgg);
} else {
values.put(postAgg.getName(), postAgg.compute(values));
}
}
for (int i = 0; i < aggFactoryNames.length; ++i) {
final String name = aggFactoryNames[i];
values.put(name, fn.manipulate(aggregatorFactories[i], input.getMetric(name)));
}
values.put(dimension, input.getDimensionValue(dimension));
return values;
}
}
)
);
return new Result<>(
result.getTimestamp(),
new TopNResultValue(serializedValues)
);
}
};
}
@Override
public TypeReference<Result<TopNResultValue>> getResultTypeReference()
{
return TYPE_REFERENCE;
}
@Override
public CacheStrategy<Result<TopNResultValue>, Object, TopNQuery> getCacheStrategy(final TopNQuery query)
{
return new CacheStrategy<Result<TopNResultValue>, Object, TopNQuery>()
{
private final List<AggregatorFactory> aggs = Lists.newArrayList(query.getAggregatorSpecs());
private final List<PostAggregator> postAggs = AggregatorUtil.pruneDependentPostAgg(
query.getPostAggregatorSpecs(),
query.getTopNMetricSpec()
.getMetricName(query.getDimensionSpec())
);
@Override
public byte[] computeCacheKey(TopNQuery query)
{
// Key layout: type byte | dimension spec | metric spec | threshold(4) |
// granularity | filter | aggregators.
final byte[] dimensionSpecBytes = query.getDimensionSpec().getCacheKey();
final byte[] metricSpecBytes = query.getTopNMetricSpec().getCacheKey();
final DimFilter dimFilter = query.getDimensionsFilter();
final byte[] filterBytes = dimFilter == null ? new byte[]{} : dimFilter.getCacheKey();
final byte[] aggregatorBytes = QueryCacheHelper.computeAggregatorBytes(query.getAggregatorSpecs());
final byte[] granularityBytes = query.getGranularity().cacheKey();
return ByteBuffer
.allocate(
1 + dimensionSpecBytes.length + metricSpecBytes.length + 4 +
granularityBytes.length + filterBytes.length + aggregatorBytes.length
)
.put(TOPN_QUERY)
.put(dimensionSpecBytes)
.put(metricSpecBytes)
.put(Ints.toByteArray(query.getThreshold()))
.put(granularityBytes)
.put(filterBytes)
.put(aggregatorBytes)
.array();
}
@Override
public TypeReference<Object> getCacheObjectClazz()
{
return OBJECT_TYPE_REFERENCE;
}
@Override
public Function<Result<TopNResultValue>, Object> prepareForCache()
{
// Serializes a result to a compact list: [timestampMillis,
// [dimValue, agg1, agg2, ...], ...]. Post-aggs are not cached; they
// are recomputed in pullFromCache.
return new Function<Result<TopNResultValue>, Object>()
{
private final String[] aggFactoryNames = extractFactoryName(query.getAggregatorSpecs());
@Override
public Object apply(final Result<TopNResultValue> input)
{
List<DimensionAndMetricValueExtractor> results = Lists.newArrayList(input.getValue());
final List<Object> retVal = Lists.newArrayListWithCapacity(results.size() + 1);
// make sure to preserve timezone information when caching results
retVal.add(input.getTimestamp().getMillis());
for (DimensionAndMetricValueExtractor result : results) {
List<Object> vals = Lists.newArrayListWithCapacity(aggFactoryNames.length + 2);
vals.add(result.getDimensionValue(query.getDimensionSpec().getOutputName()));
for (String aggName : aggFactoryNames) {
vals.add(result.getMetric(aggName));
}
retVal.add(vals);
}
return retVal;
}
};
}
@Override
public Function<Object, Result<TopNResultValue>> pullFromCache()
{
// Inverse of prepareForCache: rebuilds rows, deserializes aggregator
// values via their factories, and recomputes pruned post-aggs.
return new Function<Object, Result<TopNResultValue>>()
{
private final QueryGranularity granularity = query.getGranularity();
@Override
public Result<TopNResultValue> apply(Object input)
{
List<Object> results = (List<Object>) input;
List<Map<String, Object>> retVal = Lists.newArrayListWithCapacity(results.size());
Iterator<Object> inputIter = results.iterator();
DateTime timestamp = granularity.toDateTime(((Number) inputIter.next()).longValue());
while (inputIter.hasNext()) {
List<Object> result = (List<Object>) inputIter.next();
Map<String, Object> vals = Maps.newLinkedHashMap();
Iterator<AggregatorFactory> aggIter = aggs.iterator();
Iterator<Object> resultIter = result.iterator();
vals.put(query.getDimensionSpec().getOutputName(), resultIter.next());
while (aggIter.hasNext() && resultIter.hasNext()) {
final AggregatorFactory factory = aggIter.next();
vals.put(factory.getName(), factory.deserialize(resultIter.next()));
}
for (PostAggregator postAgg : postAggs) {
vals.put(postAgg.getName(), postAgg.compute(vals));
}
retVal.add(vals);
}
return new Result<>(timestamp, new TopNResultValue(retVal));
}
};
}
};
}
@Override
public QueryRunner<Result<TopNResultValue>> preMergeQueryDecoration(final QueryRunner<Result<TopNResultValue>> runner)
{
// Before merging: optimize the dimension filter and, when the extraction
// function can be applied afterwards, strip it so the underlying scan runs
// on the raw dimension.
return intervalChunkingQueryRunnerDecorator.decorate(
new QueryRunner<Result<TopNResultValue>>()
{
@Override
public Sequence<Result<TopNResultValue>> run(
Query<Result<TopNResultValue>> query, Map<String, Object> responseContext
)
{
TopNQuery topNQuery = (TopNQuery) query;
if (topNQuery.getDimensionsFilter() != null) {
topNQuery = topNQuery.withDimFilter(topNQuery.getDimensionsFilter().optimize());
}
final TopNQuery delegateTopNQuery = topNQuery;
if (TopNQueryEngine.canApplyExtractionInPost(delegateTopNQuery)) {
final DimensionSpec dimensionSpec = delegateTopNQuery.getDimensionSpec();
return runner.run(
delegateTopNQuery.withDimensionSpec(
new DefaultDimensionSpec(
dimensionSpec.getDimension(),
dimensionSpec.getOutputName()
)
), responseContext
);
} else {
return runner.run(delegateTopNQuery, responseContext);
}
}
}
, this
);
}
@Override
public QueryRunner<Result<TopNResultValue>> postMergeQueryDecoration(final QueryRunner<Result<TopNResultValue>> runner)
{
// After merging: enforce the configured minimum threshold, then apply any
// deferred extraction function to the dimension values in-place.
final ThresholdAdjustingQueryRunner thresholdRunner = new ThresholdAdjustingQueryRunner(
runner,
config
);
return new QueryRunner<Result<TopNResultValue>>()
{
@Override
public Sequence<Result<TopNResultValue>> run(
final Query<Result<TopNResultValue>> query, final Map<String, Object> responseContext
)
{
// thresholdRunner.run throws ISE if query is not TopNQuery
final Sequence<Result<TopNResultValue>> resultSequence = thresholdRunner.run(query, responseContext);
final TopNQuery topNQuery = (TopNQuery) query;
if (!TopNQueryEngine.canApplyExtractionInPost(topNQuery)) {
return resultSequence;
} else {
return Sequences.map(
resultSequence, new Function<Result<TopNResultValue>, Result<TopNResultValue>>()
{
@Override
public Result<TopNResultValue> apply(Result<TopNResultValue> input)
{
TopNResultValue resultValue = input.getValue();
return new Result<TopNResultValue>(
input.getTimestamp(),
new TopNResultValue(
Lists.transform(
resultValue.getValue(),
new Function<DimensionAndMetricValueExtractor, DimensionAndMetricValueExtractor>()
{
@Override
public DimensionAndMetricValueExtractor apply(
DimensionAndMetricValueExtractor input
)
{
String dimOutputName = topNQuery.getDimensionSpec().getOutputName();
Object dimValue = input.getDimensionValue(dimOutputName);
Map<String, Object> map = input.getBaseObject();
// Mutates the row's backing map in place with the extracted value.
map.put(
dimOutputName,
topNQuery.getDimensionSpec().getExtractionFn().apply(dimValue)
);
return input;
}
}
)
)
);
}
}
);
}
}
};
}
// Runs the query with at least the configured minimum threshold, then trims
// each result (including by-segment nested results) back to the requested one.
static class ThresholdAdjustingQueryRunner implements QueryRunner<Result<TopNResultValue>>
{
private final QueryRunner<Result<TopNResultValue>> runner;
private final TopNQueryConfig config;
public ThresholdAdjustingQueryRunner(
QueryRunner<Result<TopNResultValue>> runner,
TopNQueryConfig config
)
{
this.runner = runner;
this.config = config;
}
@Override
public Sequence<Result<TopNResultValue>> run(
Query<Result<TopNResultValue>> input,
Map<String, Object> responseContext
)
{
if (!(input instanceof TopNQuery)) {
throw new ISE("Can only handle [%s], got [%s]", TopNQuery.class, input.getClass());
}
final TopNQuery query = (TopNQuery) input;
final int minTopNThreshold = query.getContextValue("minTopNThreshold", config.getMinTopNThreshold());
// Already above the floor: run unmodified.
if (query.getThreshold() > minTopNThreshold) {
return runner.run(query, responseContext);
}
final boolean isBySegment = BaseQuery.getContextBySegment(query, false);
return Sequences.map(
runner.run(query.withThreshold(minTopNThreshold), responseContext),
new Function<Result<TopNResultValue>, Result<TopNResultValue>>()
{
@Override
public Result<TopNResultValue> apply(Result<TopNResultValue> input)
{
if (isBySegment) {
BySegmentResultValue<Result<TopNResultValue>> value = (BySegmentResultValue<Result<TopNResultValue>>) input
.getValue();
return new Result<TopNResultValue>(
input.getTimestamp(),
new BySegmentTopNResultValue(
Lists.transform(
value.getResults(),
new Function<Result<TopNResultValue>, Result<TopNResultValue>>()
{
@Override
public Result<TopNResultValue> apply(Result<TopNResultValue> input)
{
return new Result<>(
input.getTimestamp(),
new TopNResultValue(
Lists.<Object>newArrayList(
Iterables.limit(
input.getValue(),
query.getThreshold()
)
)
)
);
}
}
),
value.getSegmentId(),
value.getInterval()
)
);
}
return new Result<>(
input.getTimestamp(),
new TopNResultValue(
Lists.<Object>newArrayList(
Iterables.limit(
input.getValue(),
query.getThreshold()
)
)
)
);
}
}
);
}
}
}
| |
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper.core;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.NumericRangeFilter;
import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Numbers;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.NumericIntegerAnalyzer;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.codec.postingsformat.PostingsFormatProvider;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.mapper.*;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.search.NumericRangeFieldDataFilter;
import org.elasticsearch.index.similarity.SimilarityProvider;
import java.io.IOException;
import java.util.Map;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeShortValue;
import static org.elasticsearch.index.mapper.MapperBuilders.shortField;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseNumberField;
/**
*
*/
public class ShortFieldMapper extends NumberFieldMapper<Short> {
// Mapping type name used in index metadata for this field mapper.
public static final String CONTENT_TYPE = "short";
// Default field settings for short fields; the shared FieldType is frozen so
// it cannot be mutated after class load.
public static class Defaults extends NumberFieldMapper.Defaults {
public static final FieldType SHORT_FIELD_TYPE = new FieldType(NumberFieldMapper.Defaults.NUMBER_FIELD_TYPE);
static {
SHORT_FIELD_TYPE.freeze();
}
public static final Short NULL_VALUE = null;
}
// Fluent builder for ShortFieldMapper; each instance copies the frozen default
// FieldType so per-field tweaks do not leak across mappers.
public static class Builder extends NumberFieldMapper.Builder<Builder, ShortFieldMapper> {
protected Short nullValue = Defaults.NULL_VALUE;
public Builder(String name) {
super(name, new FieldType(Defaults.SHORT_FIELD_TYPE));
builder = this;
}
// Value substituted when a document supplies null for this field.
public Builder nullValue(short nullValue) {
this.nullValue = nullValue;
return this;
}
@Override
public ShortFieldMapper build(BuilderContext context) {
// Norms can be omitted only while boost stays at the default 1.0.
fieldType.setOmitNorms(fieldType.omitNorms() && boost == 1.0f);
ShortFieldMapper fieldMapper = new ShortFieldMapper(buildNames(context),
precisionStep, fuzzyFactor, boost, fieldType, nullValue,
ignoreMalformed(context), provider, similarity);
fieldMapper.includeInAll(includeInAll);
return fieldMapper;
}
}
// Parses the "short" mapping definition from the mapping JSON; delegates the
// shared numeric options to parseNumberField and handles "null_value" here.
public static class TypeParser implements Mapper.TypeParser {
@Override
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
ShortFieldMapper.Builder builder = shortField(name);
parseNumberField(builder, name, node, parserContext);
for (Map.Entry<String, Object> entry : node.entrySet()) {
String propName = Strings.toUnderscoreCase(entry.getKey());
Object propNode = entry.getValue();
if (propName.equals("null_value")) {
builder.nullValue(nodeShortValue(propNode));
}
}
return builder;
}
}
// Substitute for null document values; the string form is kept alongside for
// serialization of the mapping.
private Short nullValue;
private String nullValueAsString;
// Shorts are analyzed with the integer numeric analyzers (indexed as 32-bit
// ints); the "max" analyzer uses Integer.MAX_VALUE precision step so searches
// hit only the full-precision terms.
protected ShortFieldMapper(Names names, int precisionStep, String fuzzyFactor,
float boost, FieldType fieldType,
Short nullValue, Explicit<Boolean> ignoreMalformed,
PostingsFormatProvider provider, SimilarityProvider similarity) {
super(names, precisionStep, fuzzyFactor, boost, fieldType,
ignoreMalformed, new NamedAnalyzer("_short/" + precisionStep, new NumericIntegerAnalyzer(precisionStep)),
new NamedAnalyzer("_short/max", new NumericIntegerAnalyzer(Integer.MAX_VALUE)), provider, similarity);
this.nullValue = nullValue;
this.nullValueAsString = nullValue == null ? null : nullValue.toString();
}
@Override
protected int maxPrecisionStep() {
// 32 rather than 16: shorts are prefix-encoded as 32-bit ints throughout
// this mapper (see the newIntRange/intToPrefixCoded usage below).
return 32;
}
@Override
public Short value(Field field) {
// Stored values are raw bytes; null when the field has no binary value.
BytesRef value = field.binaryValue();
if (value == null) {
return null;
}
return Numbers.bytesToShort(value.bytes);
}
@Override
public Short valueFromString(String value) {
    // Delegate parsing and range validation to the JDK; throws
    // NumberFormatException for non-numeric or out-of-range input.
    return Short.parseShort(value);
}
@Override
public String indexedValue(String value) {
// Produces the prefix-coded term string as it appears in the index;
// shorts are widened to int to match the analyzer encoding.
BytesRef bytesRef = new BytesRef();
NumericUtils.intToPrefixCoded(Short.parseShort(value), precisionStep(), bytesRef);
return bytesRef.utf8ToString();
}
    /**
     * Numeric "fuzzy" query: matches values within +/- minSim of the given value.
     * minSim is parsed as a short when possible, otherwise truncated from a float.
     */
    @Override
    public Query fuzzyQuery(String value, String minSim, int prefixLength, int maxExpansions, boolean transpositions) {
        short iValue = Short.parseShort(value);
        short iSim;
        try {
            iSim = Short.parseShort(minSim);
        } catch (NumberFormatException e) {
            // Fall back to a float similarity (e.g. "1.5") truncated to a short.
            iSim = (short) Float.parseFloat(minSim);
        }
        return NumericRangeQuery.newIntRange(names.indexName(), precisionStep,
                iValue - iSim,
                iValue + iSim,
                true, true);
    }
    /**
     * Numeric "fuzzy" query with a double similarity: the +/- window is
     * minSim scaled by the configured fuzzy factor.
     */
    @Override
    public Query fuzzyQuery(String value, double minSim, int prefixLength, int maxExpansions, boolean transpositions) {
        short iValue = Short.parseShort(value);
        short iSim = (short) (minSim * dFuzzyFactor);
        return NumericRangeQuery.newIntRange(names.indexName(), precisionStep,
                iValue - iSim,
                iValue + iSim,
                true, true);
    }
@Override
public Query fieldQuery(String value, @Nullable QueryParseContext context) {
int iValue = Integer.parseInt(value);
return NumericRangeQuery.newIntRange(names.indexName(), precisionStep,
iValue, iValue, true, true);
}
@Override
public Query rangeQuery(String lowerTerm, String upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
return NumericRangeQuery.newIntRange(names.indexName(), precisionStep,
lowerTerm == null ? null : Integer.parseInt(lowerTerm),
upperTerm == null ? null : Integer.parseInt(upperTerm),
includeLower, includeUpper);
}
@Override
public Filter fieldFilter(String value, @Nullable QueryParseContext context) {
int iValue = Integer.parseInt(value);
return NumericRangeFilter.newIntRange(names.indexName(), precisionStep,
iValue, iValue, true, true);
}
@Override
public Filter rangeFilter(String lowerTerm, String upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
return NumericRangeFilter.newIntRange(names.indexName(), precisionStep,
lowerTerm == null ? null : Integer.parseInt(lowerTerm),
upperTerm == null ? null : Integer.parseInt(upperTerm),
includeLower, includeUpper);
}
@Override
public Filter rangeFilter(FieldDataCache fieldDataCache, String lowerTerm, String upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
return NumericRangeFieldDataFilter.newShortRange(fieldDataCache, names.indexName(),
lowerTerm == null ? null : Short.parseShort(lowerTerm),
upperTerm == null ? null : Short.parseShort(upperTerm),
includeLower, includeUpper);
}
@Override
public Filter nullValueFilter() {
if (nullValue == null) {
return null;
}
return NumericRangeFilter.newIntRange(names.indexName(), precisionStep,
nullValue.intValue(),
nullValue.intValue(),
true, true);
}
    @Override
    protected boolean customBoost() {
        // Boost may be supplied per-document via the {"value": ..., "boost": ...} form.
        return true;
    }
    /**
     * Parses the current document value into an indexable field.
     *
     * Handles three input shapes: an externally-set value (null, string, or
     * Number), an object form {"value": ..., "boost": ...}, and a plain
     * scalar token. Returns null when the value is absent and no null_value
     * substitute is configured.
     */
    @Override
    protected Field innerParseCreateField(ParseContext context) throws IOException {
        short value;
        float boost = this.boost;
        if (context.externalValueSet()) {
            Object externalValue = context.externalValue();
            if (externalValue == null) {
                // No value: index the configured substitute or skip the field.
                if (nullValue == null) {
                    return null;
                }
                value = nullValue;
            } else if (externalValue instanceof String) {
                String sExternalValue = (String) externalValue;
                if (sExternalValue.length() == 0) {
                    // Empty string is treated like null.
                    if (nullValue == null) {
                        return null;
                    }
                    value = nullValue;
                } else {
                    value = Short.parseShort(sExternalValue);
                }
            } else {
                value = ((Number) externalValue).shortValue();
            }
            if (context.includeInAll(includeInAll, this)) {
                context.allEntries().addText(names.fullName(), Short.toString(value), boost);
            }
        } else {
            XContentParser parser = context.parser();
            if (parser.currentToken() == XContentParser.Token.VALUE_NULL ||
                    (parser.currentToken() == XContentParser.Token.VALUE_STRING && parser.textLength() == 0)) {
                // Explicit null (or empty string) in the source document.
                if (nullValue == null) {
                    return null;
                }
                value = nullValue;
                if (nullValueAsString != null && (context.includeInAll(includeInAll, this))) {
                    context.allEntries().addText(names.fullName(), nullValueAsString, boost);
                }
            } else if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
                // Object form: {"value": <short>, "boost": <float>} (or _value/_boost).
                XContentParser.Token token;
                String currentFieldName = null;
                Short objValue = nullValue;
                while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                    if (token == XContentParser.Token.FIELD_NAME) {
                        currentFieldName = parser.currentName();
                    } else {
                        if ("value".equals(currentFieldName) || "_value".equals(currentFieldName)) {
                            if (parser.currentToken() != XContentParser.Token.VALUE_NULL) {
                                objValue = parser.shortValue();
                            }
                        } else if ("boost".equals(currentFieldName) || "_boost".equals(currentFieldName)) {
                            boost = parser.floatValue();
                        }
                    }
                }
                if (objValue == null) {
                    // no value
                    return null;
                }
                value = objValue;
            } else {
                // Plain scalar token.
                value = parser.shortValue();
                if (context.includeInAll(includeInAll, this)) {
                    context.allEntries().addText(names.fullName(), parser.text(), boost);
                }
            }
        }
        CustomShortNumericField field = new CustomShortNumericField(this, value, fieldType);
        field.setBoost(boost);
        return field;
    }
    @Override
    public FieldDataType fieldDataType() {
        return FieldDataType.DefaultTypes.SHORT;
    }
    @Override
    protected String contentType() {
        return CONTENT_TYPE;
    }
@Override
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
super.merge(mergeWith, mergeContext);
if (!this.getClass().equals(mergeWith.getClass())) {
return;
}
if (!mergeContext.mergeFlags().simulate()) {
this.nullValue = ((ShortFieldMapper) mergeWith).nullValue;
this.nullValueAsString = ((ShortFieldMapper) mergeWith).nullValueAsString;
}
}
    /**
     * Serializes this mapper's settings; only values differing from the
     * defaults are emitted, keeping the mapping output minimal.
     */
    @Override
    protected void doXContentBody(XContentBuilder builder) throws IOException {
        super.doXContentBody(builder);
        if (indexed() != Defaults.SHORT_FIELD_TYPE.indexed() ||
                analyzed() != Defaults.SHORT_FIELD_TYPE.tokenized()) {
            builder.field("index", indexTokenizeOptionToString(indexed(), analyzed()));
        }
        if (stored() != Defaults.SHORT_FIELD_TYPE.stored()) {
            builder.field("store", stored());
        }
        if (storeTermVectors() != Defaults.SHORT_FIELD_TYPE.storeTermVectors()) {
            builder.field("store_term_vector", storeTermVectors());
        }
        if (storeTermVectorOffsets() != Defaults.SHORT_FIELD_TYPE.storeTermVectorOffsets()) {
            builder.field("store_term_vector_offsets", storeTermVectorOffsets());
        }
        if (storeTermVectorPositions() != Defaults.SHORT_FIELD_TYPE.storeTermVectorPositions()) {
            builder.field("store_term_vector_positions", storeTermVectorPositions());
        }
        if (storeTermVectorPayloads() != Defaults.SHORT_FIELD_TYPE.storeTermVectorPayloads()) {
            builder.field("store_term_vector_payloads", storeTermVectorPayloads());
        }
        if (omitNorms() != Defaults.SHORT_FIELD_TYPE.omitNorms()) {
            builder.field("omit_norms", omitNorms());
        }
        if (indexOptions() != Defaults.SHORT_FIELD_TYPE.indexOptions()) {
            builder.field("index_options", indexOptionToString(indexOptions()));
        }
        if (precisionStep != Defaults.PRECISION_STEP) {
            builder.field("precision_step", precisionStep);
        }
        if (fuzzyFactor != Defaults.FUZZY_FACTOR) {
            builder.field("fuzzy_factor", fuzzyFactor);
        }
        if (similarity() != null) {
            builder.field("similarity", similarity().name());
        }
        if (nullValue != null) {
            builder.field("null_value", nullValue);
        }
        if (includeInAll != null) {
            builder.field("include_in_all", includeInAll);
        }
    }
    /**
     * Indexable field wrapping a short value. The stored representation is the
     * raw short bytes; the indexed token stream reuses the mapper's cached
     * numeric (int) token stream.
     */
    public static class CustomShortNumericField extends CustomNumericField {
        private final short number;
        private final NumberFieldMapper mapper;
        public CustomShortNumericField(NumberFieldMapper mapper, short number, FieldType fieldType) {
            // Only store the raw bytes when the field type requests storage.
            super(mapper, mapper.stored() ? Numbers.shortToBytes(number) : null, fieldType);
            this.mapper = mapper;
            this.number = number;
        }
        @Override
        public TokenStream tokenStream(Analyzer analyzer) throws IOException {
            if (fieldType().indexed()) {
                // Shorts are indexed using the int numeric token stream.
                return mapper.popCachedStream().setIntValue(number);
            }
            return null;
        }
        @Override
        public String numericAsString() {
            return Short.toString(number);
        }
    }
}
| |
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.compositor.bottombar.contextualsearch;
import android.content.Context;
import android.content.Intent;
import android.content.pm.ActivityInfo;
import android.content.pm.PackageManager;
import android.content.pm.ResolveInfo;
import android.graphics.drawable.Drawable;
import android.provider.Browser;
import android.text.TextUtils;
import android.widget.ImageView;
import androidx.core.graphics.drawable.DrawableCompat;
import org.chromium.base.IntentUtils;
import org.chromium.base.PackageManagerUtils;
import org.chromium.chrome.R;
import org.chromium.chrome.browser.ChromeTabbedActivity;
import org.chromium.chrome.browser.ChromeTabbedActivity2;
import org.chromium.chrome.browser.IntentHandler;
import org.chromium.chrome.browser.contextualsearch.ContextualSearchUma;
import org.chromium.chrome.browser.contextualsearch.QuickActionCategory;
import org.chromium.chrome.browser.tab.Tab;
import org.chromium.chrome.browser.theme.ThemeUtils;
import org.chromium.content_public.browser.LoadUrlParams;
import org.chromium.ui.resources.dynamics.DynamicResourceLoader;
import org.chromium.ui.resources.dynamics.ViewResourceInflater;
import org.chromium.ui.util.ColorUtils;
import java.net.URISyntaxException;
import java.util.List;
/**
* Stores information related to a Contextual Search "quick action."
* Actions can be activated through a tap on the Bar and include intents like calling a phone
* number or launching Maps for a street address.
*/
public class ContextualSearchQuickActionControl extends ViewResourceInflater {
    private Context mContext;
    // URI the quick-action intent is parsed from; also loaded directly in the
    // tab when the action is handled inside Chrome.
    private String mQuickActionUri;
    private int mQuickActionCategory;
    private int mToolbarBackgroundColor;
    private boolean mHasQuickAction;
    // True when a WEBSITE action has no external default handler, so the URI
    // should be opened in the current tab instead of via an intent.
    private boolean mOpenQuickActionInChrome;
    private Intent mIntent;
    private String mCaption;
    /**
     * @param context The Android Context used to inflate the View.
     * @param resourceLoader The resource loader that will handle the snapshot capturing.
     */
    public ContextualSearchQuickActionControl(Context context,
            DynamicResourceLoader resourceLoader) {
        super(R.layout.contextual_search_quick_action_icon_view,
                R.id.contextual_search_quick_action_icon_view,
                context, null, resourceLoader);
        mContext = context;
    }
    /**
     * Gets the resource ID of the icon for the given category.
     * @param category Which application category the icon should be for.
     * @return The resource ID or {@code null} if an unsupported category is supplied.
     */
    private static Integer getIconResId(@QuickActionCategory int category) {
        switch (category) {
            case QuickActionCategory.ADDRESS:
                return R.drawable.ic_place_googblue_36dp;
            case QuickActionCategory.EMAIL:
                return R.drawable.ic_email_googblue_36dp;
            case QuickActionCategory.EVENT:
                return R.drawable.ic_event_googblue_36dp;
            case QuickActionCategory.PHONE:
                return R.drawable.ic_phone_googblue_36dp;
            case QuickActionCategory.WEBSITE:
                return R.drawable.ic_link_grey600_36dp;
            default:
                return null;
        }
    }
    /**
     * Gets the caption string to show for the default app for the given category.
     * @param category Which application category the string should be for.
     * @return A string ID or {@code null} if an unsupported category is supplied.
     */
    private static Integer getDefaultAppCaptionId(@QuickActionCategory int category) {
        switch (category) {
            case QuickActionCategory.ADDRESS:
                return R.string.contextual_search_quick_action_caption_open;
            case QuickActionCategory.EMAIL:
                return R.string.contextual_search_quick_action_caption_email;
            case QuickActionCategory.EVENT:
                return R.string.contextual_search_quick_action_caption_event;
            case QuickActionCategory.PHONE:
                return R.string.contextual_search_quick_action_caption_phone;
            case QuickActionCategory.WEBSITE:
                return R.string.contextual_search_quick_action_caption_open;
            default:
                return null;
        }
    }
    /**
     * Gets the caption string to show for a generic app of the given category.
     * Used when no default handler is resolved for the action's intent.
     * @param category Which application category the string should be for.
     * @return A string ID or {@code null} if an unsupported category is supplied.
     */
    private static Integer getFallbackCaptionId(@QuickActionCategory int category) {
        switch (category) {
            case QuickActionCategory.ADDRESS:
                return R.string.contextual_search_quick_action_caption_generic_map;
            case QuickActionCategory.EMAIL:
                return R.string.contextual_search_quick_action_caption_generic_email;
            case QuickActionCategory.EVENT:
                return R.string.contextual_search_quick_action_caption_generic_event;
            case QuickActionCategory.PHONE:
                // Phone captions never mention an app, so the default caption is reused.
                return R.string.contextual_search_quick_action_caption_phone;
            case QuickActionCategory.WEBSITE:
                return R.string.contextual_search_quick_action_caption_generic_website;
            default:
                return null;
        }
    }
    /**
     * @param quickActionUri The URI for the intent associated with the quick action.
     *                       If the URI is the empty string or cannot be parsed no quick
     *                       action will be available.
     * @param quickActionCategory The {@link QuickActionCategory} for the quick action.
     * @param toolbarBackgroundColor The current toolbar background color. This may be
     *                               used for icon tinting.
     */
    public void setQuickAction(
            String quickActionUri, int quickActionCategory, int toolbarBackgroundColor) {
        if (TextUtils.isEmpty(quickActionUri) || quickActionCategory == QuickActionCategory.NONE
                || quickActionCategory >= QuickActionCategory.BOUNDARY) {
            // Invalid input: clear any previously set quick action.
            reset();
            return;
        }
        mQuickActionUri = quickActionUri;
        mQuickActionCategory = quickActionCategory;
        mToolbarBackgroundColor = toolbarBackgroundColor;
        resolveIntent();
    }
    /**
     * Sends the intent associated with the quick action if one is available.
     * @param tab The current tab, used to load a URL if the quick action should open
     *            inside Chrome.
     */
    public void sendIntent(Tab tab) {
        if (mOpenQuickActionInChrome) {
            tab.loadUrl(new LoadUrlParams(mQuickActionUri));
            return;
        }
        if (mIntent == null) return;
        // Set the Browser application ID to us in case the user chooses Chrome
        // as the app from the intent picker.
        Context context = getContext();
        mIntent.putExtra(Browser.EXTRA_APPLICATION_ID, context.getPackageName());
        mIntent.putExtra(Browser.EXTRA_CREATE_NEW_TAB, true);
        mIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
        if (context instanceof ChromeTabbedActivity2) {
            // Set the window ID so the new tab opens in the correct window.
            // NOTE(review): window ID 2 appears to be the second-window constant
            // for multi-window — confirm against IntentHandler.EXTRA_WINDOW_ID usage.
            mIntent.putExtra(IntentHandler.EXTRA_WINDOW_ID, 2);
        }
        IntentUtils.safeStartActivity(mContext, mIntent);
    }
    /**
     * @return The caption associated with the quick action or null if no quick action
     *         is available.
     */
    public String getCaption() {
        return mCaption;
    }
    /**
     * @return The resource id for the icon associated with the quick action or 0 if no
     *         quick action is available.
     */
    public int getIconResId() {
        // The "icon res id" exposed here is the inflated view's id, used as a
        // dynamic-resource key (see ViewResourceInflater), not a drawable id.
        return mHasQuickAction ? getViewId() : 0;
    }
    /**
     * @return Whether there is currently a quick action available.
     */
    public boolean hasQuickAction() {
        return mHasQuickAction;
    }
    /**
     * Resets quick action data.
     */
    public void reset() {
        mQuickActionUri = "";
        mQuickActionCategory = QuickActionCategory.NONE;
        mHasQuickAction = false;
        mOpenQuickActionInChrome = false;
        mIntent = null;
        mCaption = "";
        mToolbarBackgroundColor = 0;
    }
    @Override
    protected boolean shouldAttachView() {
        // The icon view is snapshotted via the resource loader, never attached.
        return false;
    }
    /**
     * Parses {@link #mQuickActionUri} into an intent, resolves possible handlers,
     * and prepares the icon and caption for the resolved action. Resets all
     * state when the URI cannot be parsed or nothing can handle the intent.
     */
    private void resolveIntent() {
        try {
            mIntent = Intent.parseUri(mQuickActionUri, 0);
        } catch (URISyntaxException e) {
            // If the intent cannot be parsed, there is no quick action available.
            ContextualSearchUma.logQuickActionIntentResolution(mQuickActionCategory, 0);
            reset();
            return;
        }
        PackageManager packageManager = mContext.getPackageManager();
        // If a default is set, PackageManager#resolveActivity() will return the
        // ResolveInfo for the default activity.
        ResolveInfo possibleDefaultActivity = PackageManagerUtils.resolveActivity(mIntent, 0);
        // PackageManager#queryIntentActivities() will return a list of activities that
        // can handle the intent, sorted from best to worst. If there are no matching
        // activities, an empty list is returned.
        List<ResolveInfo> resolveInfoList = PackageManagerUtils.queryIntentActivities(mIntent, 0);
        int numMatchingActivities = 0;
        ResolveInfo defaultActivityResolveInfo = null;
        for (ResolveInfo resolveInfo : resolveInfoList) {
            // Only exported activities can be launched from here.
            if (resolveInfo.activityInfo != null && resolveInfo.activityInfo.exported) {
                numMatchingActivities++;
                if (possibleDefaultActivity == null
                        || possibleDefaultActivity.activityInfo == null) {
                    continue;
                }
                // Return early if this resolveInfo matches the possibleDefaultActivity.
                ActivityInfo possibleDefaultActivityInfo = possibleDefaultActivity.activityInfo;
                ActivityInfo resolveActivityInfo = resolveInfo.activityInfo;
                boolean matchesPossibleDefaultActivity =
                        TextUtils.equals(resolveActivityInfo.name, possibleDefaultActivityInfo.name)
                        && TextUtils.equals(resolveActivityInfo.packageName,
                                possibleDefaultActivityInfo.packageName);
                if (matchesPossibleDefaultActivity) {
                    defaultActivityResolveInfo = resolveInfo;
                    break;
                }
            }
        }
        ContextualSearchUma.logQuickActionIntentResolution(
                mQuickActionCategory, numMatchingActivities);
        if (numMatchingActivities == 0) {
            reset();
            return;
        }
        mHasQuickAction = true;
        Drawable iconDrawable = null;
        int iconResId = 0;
        if (defaultActivityResolveInfo != null) {
            iconDrawable = defaultActivityResolveInfo.loadIcon(mContext.getPackageManager());
            if (mQuickActionCategory != QuickActionCategory.PHONE) {
                // Use the default app's name to construct the caption.
                mCaption = mContext.getResources().getString(
                        getDefaultAppCaptionId(mQuickActionCategory),
                        defaultActivityResolveInfo.loadLabel(packageManager));
            } else {
                // The caption for phone numbers does not use the app's name.
                mCaption = mContext.getResources().getString(
                        getDefaultAppCaptionId(mQuickActionCategory));
            }
        } else if (mQuickActionCategory == QuickActionCategory.WEBSITE) {
            // If there is not a default app handler for a URL, open the quick action
            // inside of Chrome.
            mOpenQuickActionInChrome = true;
            if (mContext instanceof ChromeTabbedActivity) {
                // Use the app icon if this is a ChromeTabbedActivity instance.
                iconResId = R.mipmap.app_icon;
            } else {
                // Otherwise use the link icon.
                iconResId = getIconResId(mQuickActionCategory);
                if (mToolbarBackgroundColor != 0
                        && !ThemeUtils.isUsingDefaultToolbarColor(
                                mContext, false, mToolbarBackgroundColor)
                        && ColorUtils.shouldUseLightForegroundOnBackground(
                                mToolbarBackgroundColor)) {
                    // Tint the link icon to match the custom tab toolbar.
                    iconDrawable = mContext.getDrawable(iconResId);
                    iconDrawable.mutate();
                    DrawableCompat.setTint(iconDrawable, mToolbarBackgroundColor);
                }
            }
            mCaption =
                    mContext.getResources().getString(getFallbackCaptionId(mQuickActionCategory));
        } else {
            // No default handler for a non-website category: show the generic
            // category icon and caption.
            iconResId = getIconResId(mQuickActionCategory);
            mCaption =
                    mContext.getResources().getString(getFallbackCaptionId(mQuickActionCategory));
        }
        inflate();
        if (iconDrawable != null) {
            ((ImageView) getView()).setImageDrawable(iconDrawable);
        } else {
            ((ImageView) getView()).setImageResource(iconResId);
        }
        // Request a fresh snapshot of the icon view.
        invalidate();
    }
}
| |
/**********************************************************************************
* $URL$
* $Id$
***********************************************************************************
*
* Copyright (c) 2006, 2008 Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.sakaiproject.log.impl;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.log4j.*;
import org.apache.log4j.spi.ErrorHandler;
import org.apache.log4j.spi.Filter;
import org.apache.log4j.spi.LoggingEvent;
import org.sakaiproject.authz.api.SecurityService;
import org.sakaiproject.component.api.ServerConfigurationService;
import org.sakaiproject.log.api.LogConfigurationManager;
import org.sakaiproject.log.api.LogPermissionException;
import org.sakaiproject.util.StringUtil;
/**
* <p>
* Log4jConfigurationManager lets us configure the log4j system with overrides from sakai.properties. Someday it might even have a service API for other fun things!
* </p>
*/
public abstract class Log4jConfigurationManager implements LogConfigurationManager
{
/** Our log (commons). */
private static Log M_log = LogFactory.getLog(Log4jConfigurationManager.class);
/**********************************************************************************************************************************************************************************************************************************************************
* Dependencies
*********************************************************************************************************************************************************************************************************************************************************/
/**
* @return the UsageSessionService collaborator.
*/
protected abstract ServerConfigurationService serverConfigurationService();
/**
* @return the SecurityService collaborator.
*/
protected abstract SecurityService securityService();
/**********************************************************************************************************************************************************************************************************************************************************
* Configuration
*********************************************************************************************************************************************************************************************************************************************************/
/** Configuration: enable special log handling or not. */
protected boolean m_enabled = true;
/**
* Configuration: enable special log handling or not.
*
* @param value
* the setting (true of false) for enabled.
*/
public void setEnabled(String value)
{
m_enabled = Boolean.valueOf(value).booleanValue();
}
/** Map by logger name of set of message string starts to ignore. */
protected Map m_ignore = new HashMap();
public void setIgnore(Map ignore)
{
m_ignore = ignore;
}
/**********************************************************************************************************************************************************************************************************************************************************
* Init and Destroy
*********************************************************************************************************************************************************************************************************************************************************/
/**
* Final initialization, once all dependencies are set.
*/
public void init()
{
if (m_enabled)
{
// Load optional log4j.properties file from sakai home
String log4jConfigFilePath = serverConfigurationService().getSakaiHomePath() + "log4j.properties";
if (StringUtils.isNotEmpty(log4jConfigFilePath)) {
PropertyConfigurator.configureAndWatch(log4jConfigFilePath);
}
// slip in our appender
Appender a = Logger.getRootLogger().getAppender("Sakai");
if (a != null)
{
Logger.getRootLogger().removeAppender(a);
Logger.getRootLogger().addAppender(new SakaiAppender(a));
}
// set the log4j logging system with some overrides from sakai.properties
// each in the form LEVEL.NAME where LEVEL is OFF | TRACE | DEBUG | INFO | WARN | ERROR | FATAL | ALL, name is the logger name (such as org.sakaiproject)
// example:
// log.config.count=3
// log.config.1 = ALL.org.sakaiproject.log.impl
// log.config.2 = OFF.org.sakaiproject
// log.config.3 = DEBUG.org.sakaiproject.db.impl
String configs[] = serverConfigurationService().getStrings("log.config");
if (configs != null)
{
for (int i = 0; i < configs.length; i++)
{
String parts[] = StringUtil.splitFirst(configs[i], ".");
if ((parts != null) && (parts.length == 2))
{
doSetLogLevel(parts[0], parts[1]);
}
else
{
M_log.warn("invalid log.config entry: ignoring: " + configs[i]);
}
}
}
}
M_log.info("init(): enabled: " + m_enabled);
}
/**
* Final cleanup.
*/
public void destroy()
{
M_log.info("destroy()");
}
/**
* Set the log level
*
* @param level
* The log level string - one of OFF | TRACE | DEBUG | INFO | WARN | ERROR | FATAL | ALL
* @param loggerName
* The logger name.
*/
protected boolean doSetLogLevel(String level, String loggerName)
{
if (level.equals("OFF"))
{
Logger logger = Logger.getLogger(loggerName);
if (logger != null)
{
logger.setLevel(org.apache.log4j.Level.OFF);
M_log.info("OFF logging for: " + loggerName);
}
else
{
M_log.warn("no logger found: ignoring: " + loggerName);
}
}
else if (level.equals("TRACE"))
{
// Note: log4j has nothing below debug
Logger logger = Logger.getLogger(loggerName);
if (logger != null)
{
logger.setLevel(org.apache.log4j.Level.DEBUG);
M_log.info("TRACE (DEBUG) logging for: " + loggerName);
}
else
{
M_log.warn("no logger found: ignoring: " + loggerName);
}
}
else if (level.equals("DEBUG"))
{
Logger logger = Logger.getLogger(loggerName);
if (logger != null)
{
logger.setLevel(org.apache.log4j.Level.DEBUG);
M_log.info("DEBUG logging for: " + loggerName);
}
else
{
M_log.warn("no logger found: ignoring: " + loggerName);
}
}
else if (level.equals("INFO"))
{
Logger logger = Logger.getLogger(loggerName);
if (logger != null)
{
logger.setLevel(org.apache.log4j.Level.INFO);
M_log.info("INFO logging for: " + loggerName);
}
else
{
M_log.warn("no logger found: ignoring: " + loggerName);
}
}
else if (level.equals("WARN"))
{
Logger logger = Logger.getLogger(loggerName);
if (logger != null)
{
logger.setLevel(org.apache.log4j.Level.WARN);
M_log.info("WARN logging for: " + loggerName);
}
else
{
M_log.warn("no logger found: ignoring: " + loggerName);
}
}
else if (level.equals("ERROR"))
{
Logger logger = Logger.getLogger(loggerName);
if (logger != null)
{
logger.setLevel(org.apache.log4j.Level.ERROR);
M_log.info("ERROR logging for: " + loggerName);
}
else
{
M_log.warn("no logger found: ignoring: " + loggerName);
}
}
else if (level.equals("FATAL"))
{
Logger logger = Logger.getLogger(loggerName);
if (logger != null)
{
logger.setLevel(org.apache.log4j.Level.FATAL);
M_log.info("FATAL logging for: " + loggerName);
}
else
{
M_log.warn("no logger found: ignoring: " + loggerName);
}
}
else if (level.equals("ALL"))
{
Logger logger = Logger.getLogger(loggerName);
if (logger != null)
{
logger.setLevel(org.apache.log4j.Level.ALL);
M_log.info("ALL logging for: " + loggerName);
}
else
{
M_log.warn("no logger found: ignoring: " + loggerName);
}
}
else
{
M_log.warn("invalid log level: ignoring: " + level);
return false;
}
return true;
}
/**********************************************************************************************************************************************************************************************************************************************************
* Work interface methods: LogConfigurationManager
*********************************************************************************************************************************************************************************************************************************************************/
/**
* {@inheritDoc}
*/
public boolean setLogLevel(String level, String loggerName) throws LogPermissionException
{
// check that this is a "super" user with the security service
if (!securityService().isSuperUser())
{
throw new LogPermissionException();
}
return doSetLogLevel(level, loggerName);
}
/**********************************************************************************************************************************************************************************************************************************************************
* Our special Appender
*********************************************************************************************************************************************************************************************************************************************************/
class SakaiAppender implements org.apache.log4j.Appender
{
protected Appender m_other = null;
public SakaiAppender(Appender other)
{
m_other = other;
}
public void addFilter(Filter arg0)
{
m_other.addFilter(arg0);
}
public void clearFilters()
{
m_other.clearFilters();
}
public void close()
{
m_other.close();
}
public void doAppend(LoggingEvent arg0)
{
String logger = arg0.getLoggerName();
String message = arg0.getRenderedMessage();
Level level = arg0.getLevel();
Set toIgnore = (Set) m_ignore.get(logger);
if (toIgnore != null)
{
// if any of the strings in the set start our message, skip it
for (Iterator i = toIgnore.iterator(); i.hasNext();)
{
String start = (String) i.next();
if (message.startsWith(start)) return;
}
}
m_other.doAppend(arg0);
}
public ErrorHandler getErrorHandler()
{
return m_other.getErrorHandler();
}
public Filter getFilter()
{
return m_other.getFilter();
}
public Layout getLayout()
{
return m_other.getLayout();
}
public String getName()
{
return m_other.getName();
}
public boolean requiresLayout()
{
return m_other.requiresLayout();
}
public void setErrorHandler(ErrorHandler arg0)
{
m_other.setErrorHandler(arg0);
}
public void setLayout(Layout arg0)
{
m_other.setLayout(arg0);
}
public void setName(String arg0)
{
m_other.setName(arg0);
}
}
}
| |
/*
* Copyright 2017 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jbpm.workflow.instance.node;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import org.drools.core.common.InternalKnowledgeRuntime;
import org.drools.core.util.MVELSafeHelper;
import org.jbpm.process.core.Context;
import org.jbpm.process.core.ContextContainer;
import org.jbpm.process.core.context.exception.ExceptionScope;
import org.jbpm.process.core.context.variable.VariableScope;
import org.jbpm.process.core.impl.DataTransformerRegistry;
import org.jbpm.process.instance.ContextInstance;
import org.jbpm.process.instance.ContextInstanceContainer;
import org.jbpm.process.instance.ProcessInstance;
import org.jbpm.process.instance.StartProcessHelper;
import org.jbpm.process.instance.context.exception.ExceptionScopeInstance;
import org.jbpm.process.instance.context.variable.VariableScopeInstance;
import org.jbpm.process.instance.impl.ContextInstanceFactory;
import org.jbpm.process.instance.impl.ContextInstanceFactoryRegistry;
import org.jbpm.process.instance.impl.ProcessInstanceImpl;
import org.jbpm.process.instance.impl.util.VariableUtil;
import org.jbpm.util.PatternConstants;
import org.jbpm.workflow.core.node.DataAssociation;
import org.jbpm.workflow.core.node.SubProcessNode;
import org.jbpm.workflow.core.node.Transformation;
import org.jbpm.workflow.instance.impl.NodeInstanceResolverFactory;
import org.jbpm.workflow.instance.impl.VariableScopeResolverFactory;
import org.jbpm.workflow.instance.impl.WorkflowProcessInstanceImpl;
import org.kie.api.KieBase;
import org.kie.api.definition.process.Node;
import org.kie.api.definition.process.Process;
import org.kie.api.runtime.EnvironmentName;
import org.kie.api.runtime.KieRuntime;
import org.kie.api.runtime.manager.RuntimeEngine;
import org.kie.api.runtime.manager.RuntimeManager;
import org.kie.api.runtime.process.DataTransformer;
import org.kie.api.runtime.process.EventListener;
import org.kie.api.runtime.process.NodeInstance;
import org.kie.internal.KieInternalServices;
import org.kie.internal.process.CorrelationAwareProcessRuntime;
import org.kie.internal.process.CorrelationKey;
import org.kie.internal.process.CorrelationKeyFactory;
import org.kie.internal.runtime.manager.SessionNotFoundException;
import org.kie.internal.runtime.manager.context.CaseContext;
import org.kie.internal.runtime.manager.context.ProcessInstanceIdContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Runtime counterpart of a SubFlow node.
*
*/
public class SubProcessNodeInstance extends StateBasedNodeInstance implements EventListener, ContextInstanceContainer {

    private static final long serialVersionUID = 510l;
    private static final Logger logger = LoggerFactory.getLogger(SubProcessNodeInstance.class);

    // NOTE: ContextInstances are not persisted as current functionality (exception scope) does not require it
    private Map<String, ContextInstance> contextInstances = new HashMap<String, ContextInstance>();
    private Map<String, List<ContextInstance>> subContextInstances = new HashMap<String, List<ContextInstance>>();

    // id of the child process instance started by this node (set in internalTrigger)
    private long processInstanceId;

    /** Returns the node definition backing this instance, cast to {@link SubProcessNode}. */
    protected SubProcessNode getSubProcessNode() {
        return (SubProcessNode) getNode();
    }

    /**
     * Triggers this node: resolves input data associations into parameters,
     * resolves the (possibly #{variable}-parameterized) process id, starts the
     * sub process instance and — depending on the node's "wait for completion"
     * flag — either completes immediately or registers a completion listener.
     */
    public void internalTrigger(final NodeInstance from, String type) {
        super.internalTrigger(from, type);
        // if node instance was cancelled, abort
        if (getNodeInstanceContainer().getNodeInstance(getId()) == null) {
            return;
        }
        if (!org.jbpm.workflow.core.Node.CONNECTION_DEFAULT_TYPE.equals(type)) {
            throw new IllegalArgumentException(
                "A SubProcess node only accepts default incoming connections!");
        }
        // build the input parameter map for the child process from the in-associations
        Map<String, Object> parameters = new HashMap<String, Object>();
        for (Iterator<DataAssociation> iterator = getSubProcessNode().getInAssociations().iterator(); iterator.hasNext(); ) {
            DataAssociation mapping = iterator.next();
            Object parameterValue = null;
            if (mapping.getTransformation() != null) {
                // association declares a transformation - run the registered transformer over the sources
                Transformation transformation = mapping.getTransformation();
                DataTransformer transformer = DataTransformerRegistry.get().find(transformation.getLanguage());
                if (transformer != null) {
                    parameterValue = transformer.transform(transformation.getCompiledExpression(), getSourceParameters(mapping));
                }
            } else {
                // plain mapping: try variable scope first, then MVEL expression, then #{...} resolution
                VariableScopeInstance variableScopeInstance = (VariableScopeInstance)
                    resolveContextInstance(VariableScope.VARIABLE_SCOPE, mapping.getSources().get(0));
                if (variableScopeInstance != null) {
                    parameterValue = variableScopeInstance.getVariable(mapping.getSources().get(0));
                } else {
                    try {
                        parameterValue = MVELSafeHelper.getEvaluator().eval(mapping.getSources().get(0), new NodeInstanceResolverFactory(this));
                    } catch (Throwable t) {
                        parameterValue = VariableUtil.resolveVariable(mapping.getSources().get(0), this);
                        if (parameterValue != null) {
                            parameters.put(mapping.getTarget(), parameterValue);
                        } else {
                            logger.error("Could not find variable scope for variable {}", mapping.getSources().get(0));
                            logger.error("when trying to execute SubProcess node {}", getSubProcessNode().getName());
                            logger.error("Continuing without setting parameter.");
                        }
                    }
                }
            }
            if (parameterValue != null) {
                parameters.put(mapping.getTarget(),parameterValue);
            }
        }
        String processId = getSubProcessNode().getProcessId();
        if (processId == null) {
            // if process id is not given try with process name
            processId = getSubProcessNode().getProcessName();
        }
        // resolve processId if necessary: replace every #{param} placeholder with the
        // current value of the referenced variable (or MVEL expression result)
        Map<String, String> replacements = new HashMap<String, String>();
        Matcher matcher = PatternConstants.PARAMETER_MATCHER.matcher(processId);
        while (matcher.find()) {
            String paramName = matcher.group(1);
            if (replacements.get(paramName) == null) {
                VariableScopeInstance variableScopeInstance = (VariableScopeInstance)
                    resolveContextInstance(VariableScope.VARIABLE_SCOPE, paramName);
                if (variableScopeInstance != null) {
                    Object variableValue = variableScopeInstance.getVariable(paramName);
                    String variableValueString = variableValue == null ? "" : variableValue.toString();
                    replacements.put(paramName, variableValueString);
                } else {
                    try {
                        Object variableValue = MVELSafeHelper.getEvaluator().eval(paramName, new NodeInstanceResolverFactory(this));
                        String variableValueString = variableValue == null ? "" : variableValue.toString();
                        replacements.put(paramName, variableValueString);
                    } catch (Throwable t) {
                        logger.error("Could not find variable scope for variable {}", paramName);
                        logger.error("when trying to replace variable in processId for sub process {}", getNodeName());
                        logger.error("Continuing without setting process id.");
                    }
                }
            }
        }
        for (Map.Entry<String, String> replacement: replacements.entrySet()) {
            processId = processId.replace("#{" + replacement.getKey() + "}", replacement.getValue());
        }
        KieBase kbase = ((ProcessInstance) getProcessInstance()).getKnowledgeRuntime().getKieBase();
        // start process instance
        Process process = kbase.getProcess(processId);
        if (process == null) {
            // try to find it by name
            String latestProcessId = StartProcessHelper.findLatestProcessByName(kbase, processId);
            if (latestProcessId != null) {
                processId = latestProcessId;
                process = kbase.getProcess(processId);
            }
        }
        if (process == null) {
            logger.error("Could not find process {}", processId);
            logger.error("Aborting process");
            ((ProcessInstance) getProcessInstance()).setState(ProcessInstance.STATE_ABORTED);
            throw new RuntimeException("Could not find process " + processId);
        } else {
            KieRuntime kruntime = ((ProcessInstance) getProcessInstance()).getKnowledgeRuntime();
            RuntimeManager manager = (RuntimeManager) kruntime.getEnvironment().get(EnvironmentName.RUNTIME_MANAGER);
            if (manager != null) {
                // running under a runtime manager: obtain the session via the proper context
                // (case context takes precedence over a fresh process-instance-id context)
                org.kie.api.runtime.manager.Context<?> context = ProcessInstanceIdContext.get();

                String caseId = (String) kruntime.getEnvironment().get(EnvironmentName.CASE_ID);
                if (caseId != null) {
                    context = CaseContext.get(caseId);
                }

                RuntimeEngine runtime = manager.getRuntimeEngine(context);
                kruntime = (KieRuntime) runtime.getKieSession();
            }
            if (getSubProcessNode().getMetaData("MICollectionInput") != null) {
                // remove foreach input variable to avoid problems when running in variable strict mode
                parameters.remove(getSubProcessNode().getMetaData("MICollectionInput"));
            }
            ProcessInstance processInstance = null;
            if (((WorkflowProcessInstanceImpl)getProcessInstance()).getCorrelationKey() != null) {
                // in case there is correlation key on parent instance pass it along to child so it can be easily correlated
                // since correlation key must be unique for active instances it appends processId and timestamp
                List<String> businessKeys = new ArrayList<String>();
                businessKeys.add(((WorkflowProcessInstanceImpl)getProcessInstance()).getCorrelationKey());
                businessKeys.add(processId);
                businessKeys.add(String.valueOf(System.currentTimeMillis()));
                CorrelationKeyFactory correlationKeyFactory = KieInternalServices.Factory.get().newCorrelationKeyFactory();
                CorrelationKey subProcessCorrelationKey = correlationKeyFactory.newCorrelationKey(businessKeys);
                processInstance = (ProcessInstance) ((CorrelationAwareProcessRuntime)kruntime).createProcessInstance(processId, subProcessCorrelationKey, parameters);
            } else {
                processInstance = ( ProcessInstance ) kruntime.createProcessInstance(processId, parameters);
            }
            this.processInstanceId = processInstance.getId();
            // link child to parent for traceability and completion signalling
            ((ProcessInstanceImpl) processInstance).setMetaData("ParentProcessInstanceId", getProcessInstance().getId());
            ((ProcessInstanceImpl) processInstance).setMetaData("ParentNodeInstanceId", getUniqueId());
            ((ProcessInstanceImpl) processInstance).setMetaData("ParentNodeId", getSubProcessNode().getUniqueId());
            ((ProcessInstanceImpl) processInstance).setParentProcessInstanceId(getProcessInstance().getId());
            ((ProcessInstanceImpl) processInstance).setSignalCompletion(getSubProcessNode().isWaitForCompletion());

            kruntime.startProcessInstance(processInstance.getId());
            if (!getSubProcessNode().isWaitForCompletion()) {
                // fire-and-forget sub process: complete this node immediately
                triggerCompleted();
            } else if (processInstance.getState() == ProcessInstance.STATE_COMPLETED
                    || processInstance.getState() == ProcessInstance.STATE_ABORTED) {
                // child finished synchronously during start
                processInstanceCompleted(processInstance);
            } else {
                // child still running: listen for its completion event
                addProcessListener();
            }
        }
    }

    /**
     * Cancels this node. Unless the sub process is independent, the child
     * process instance is looked up (via the runtime manager when present)
     * and aborted as well.
     */
    public void cancel() {
        super.cancel();
        if (getSubProcessNode() == null || !getSubProcessNode().isIndependent()) {
            ProcessInstance processInstance = null;
            InternalKnowledgeRuntime kruntime = ((ProcessInstance) getProcessInstance()).getKnowledgeRuntime();
            RuntimeManager manager = (RuntimeManager) kruntime.getEnvironment().get(EnvironmentName.RUNTIME_MANAGER);
            if (manager != null) {
                try {
                    org.kie.api.runtime.manager.Context<?> context = ProcessInstanceIdContext.get(processInstanceId);

                    String caseId = (String) kruntime.getEnvironment().get(EnvironmentName.CASE_ID);
                    if (caseId != null) {
                        context = CaseContext.get(caseId);
                    }

                    RuntimeEngine runtime = manager.getRuntimeEngine(context);
                    KieRuntime managedkruntime = (KieRuntime) runtime.getKieSession();
                    processInstance = (ProcessInstance) managedkruntime.getProcessInstance(processInstanceId);
                } catch (SessionNotFoundException e) {
                    // in case no session is found for parent process let's skip signal for process instance completion
                }
            } else {
                processInstance = (ProcessInstance) kruntime.getProcessInstance(processInstanceId);
            }
            if (processInstance != null) {
                processInstance.setState(ProcessInstance.STATE_ABORTED);
            }
        }
    }

    /** Returns the id of the child process instance started by this node. */
    public long getProcessInstanceId() {
        return processInstanceId;
    }

    /** Restores the child process instance id (used by marshalling/persistence code). */
    public void internalSetProcessInstanceId(long processInstanceId) {
        this.processInstanceId = processInstanceId;
    }

    public void addEventListeners() {
        super.addEventListeners();
        addProcessListener();
    }

    /** Subscribes this node to the child instance's completion event. */
    private void addProcessListener() {
        getProcessInstance().addEventListener("processInstanceCompleted:" + processInstanceId, this, true);
    }

    public void removeEventListeners() {
        super.removeEventListeners();
        getProcessInstance().removeEventListener("processInstanceCompleted:" + processInstanceId, this, true);
    }

    @Override
    public void signalEvent(String type, Object event) {
        if (("processInstanceCompleted:" + processInstanceId).equals(type)) {
            // our child finished - the event payload is the completed child instance
            processInstanceCompleted((ProcessInstance) event);
        } else {
            super.signalEvent(type, event);
        }
    }

    @Override
    public String[] getEventTypes() {
        return new String[] { "processInstanceCompleted:" + processInstanceId };
    }

    /**
     * Invoked when the child process instance completes or aborts: applies
     * output mappings, propagates aborts/faults to the parent where
     * configured, and completes this node if no exception was raised.
     */
    public void processInstanceCompleted(ProcessInstance processInstance) {
        removeEventListeners();
        handleOutMappings(processInstance);
        if (processInstance.getState() == ProcessInstance.STATE_ABORTED) {
            String faultName = processInstance.getOutcome()==null?"":processInstance.getOutcome();
            // handle exception as sub process failed with error code
            ExceptionScopeInstance exceptionScopeInstance = (ExceptionScopeInstance)
                resolveContextInstance(ExceptionScope.EXCEPTION_SCOPE, faultName);
            if (exceptionScopeInstance != null) {
                exceptionScopeInstance.handleException(faultName, processInstance.getFaultData());
                if (getSubProcessNode() != null && !getSubProcessNode().isIndependent() && getSubProcessNode().isAbortParent()){
                    cancel();
                }
                return;
            } else if (getSubProcessNode() != null && !getSubProcessNode().isIndependent() && getSubProcessNode().isAbortParent()){
                // no exception handler available: abort the parent with the child's fault name
                ((ProcessInstance) getProcessInstance()).setState(ProcessInstance.STATE_ABORTED, faultName);
                return;
            }
        }
        // handle dynamic subprocess
        if (getNode() == null) {
            setMetaData("NodeType", "SubProcessNode");
        }
        // if there were no exception proceed normally
        triggerCompleted();
    }

    /**
     * Copies the child's variables back into this instance's scope following
     * the node's out-associations (with optional transformations); dynamic
     * sub processes without mappings copy all child variables.
     */
    private void handleOutMappings(ProcessInstance processInstance) {
        VariableScopeInstance subProcessVariableScopeInstance = (VariableScopeInstance)
            processInstance.getContextInstance(VariableScope.VARIABLE_SCOPE);
        SubProcessNode subProcessNode = getSubProcessNode();
        if (subProcessNode != null) {
            for (Iterator<org.jbpm.workflow.core.node.DataAssociation> iterator= subProcessNode.getOutAssociations().iterator(); iterator.hasNext(); ) {
                org.jbpm.workflow.core.node.DataAssociation mapping = iterator.next();
                if (mapping.getTransformation() != null) {
                    Transformation transformation = mapping.getTransformation();
                    DataTransformer transformer = DataTransformerRegistry.get().find(transformation.getLanguage());
                    if (transformer != null) {
                        Object parameterValue = transformer.transform(transformation.getCompiledExpression(), subProcessVariableScopeInstance.getVariables());
                        VariableScopeInstance variableScopeInstance = (VariableScopeInstance)
                            resolveContextInstance(VariableScope.VARIABLE_SCOPE, mapping.getTarget());
                        if (variableScopeInstance != null && parameterValue != null) {
                            variableScopeInstance.setVariable(mapping.getTarget(), parameterValue);
                        } else {
                            logger.warn("Could not find variable scope for variable {}", mapping.getTarget());
                            logger.warn("Continuing without setting variable.");
                        }
                    }
                } else {
                    VariableScopeInstance variableScopeInstance = (VariableScopeInstance)
                        resolveContextInstance(VariableScope.VARIABLE_SCOPE, mapping.getTarget());
                    if (variableScopeInstance != null) {
                        Object value = subProcessVariableScopeInstance.getVariable(mapping.getSources().get(0));
                        if (value == null) {
                            // not a plain child variable: evaluate the source as an MVEL expression against the child scope
                            try {
                                value = MVELSafeHelper.getEvaluator().eval(mapping.getSources().get(0), new VariableScopeResolverFactory(subProcessVariableScopeInstance));
                            } catch (Throwable t) {
                                // do nothing
                            }
                        }
                        variableScopeInstance.setVariable(mapping.getTarget(), value);
                    } else {
                        logger.error("Could not find variable scope for variable {}", mapping.getTarget());
                        logger.error("when trying to complete SubProcess node {}", getSubProcessNode().getName());
                        logger.error("Continuing without setting variable.");
                    }
                }
            }
        } else {
            // handle dynamic sub processes without data output mapping
            mapDynamicOutputData(subProcessVariableScopeInstance.getVariables());
        }
    }

    public String getNodeName() {
        Node node = getNode();
        if (node == null) {
            // dynamically created sub process without a node definition
            return "[Dynamic] Sub Process";
        }
        return super.getNodeName();
    }

    @Override
    public List<ContextInstance> getContextInstances(String contextId) {
        return this.subContextInstances.get(contextId);
    }

    @Override
    public void addContextInstance(String contextId, ContextInstance contextInstance) {
        List<ContextInstance> list = this.subContextInstances.get(contextId);
        if (list == null) {
            list = new ArrayList<ContextInstance>();
            this.subContextInstances.put(contextId, list);
        }
        list.add(contextInstance);
    }

    @Override
    public void removeContextInstance(String contextId, ContextInstance contextInstance) {
        List<ContextInstance> list = this.subContextInstances.get(contextId);
        if (list != null) {
            list.remove(contextInstance);
        }
    }

    @Override
    public ContextInstance getContextInstance(String contextId, long id) {
        List<ContextInstance> contextInstances = subContextInstances.get(contextId);
        if (contextInstances != null) {
            for (ContextInstance contextInstance: contextInstances) {
                if (contextInstance.getContextId() == id) {
                    return contextInstance;
                }
            }
        }
        return null;
    }

    @Override
    public ContextInstance getContextInstance(Context context) {
        // lazily create the context instance through the registered factory
        ContextInstanceFactory conf = ContextInstanceFactoryRegistry.INSTANCE.getContextInstanceFactory(context);
        if (conf == null) {
            throw new IllegalArgumentException("Illegal context type (registry not found): " + context.getClass());
        }
        ContextInstance contextInstance = (ContextInstance) conf.getContextInstance(context, this, (ProcessInstance) getProcessInstance());
        if (contextInstance == null) {
            throw new IllegalArgumentException("Illegal context type (instance not found): " + context.getClass());
        }
        return contextInstance;
    }

    @Override
    public ContextContainer getContextContainer() {
        return getSubProcessNode();
    }

    /**
     * Resolves all source values of the given association into a single-entry
     * parameter map keyed by the association target; used as transformer input.
     */
    protected Map<String, Object> getSourceParameters(DataAssociation association) {
        Map<String, Object> parameters = new HashMap<String, Object>();
        for (String sourceParam : association.getSources()) {
            Object parameterValue = null;
            VariableScopeInstance variableScopeInstance = (VariableScopeInstance)
                resolveContextInstance(VariableScope.VARIABLE_SCOPE, sourceParam);
            if (variableScopeInstance != null) {
                parameterValue = variableScopeInstance.getVariable(sourceParam);
            } else {
                try {
                    parameterValue = MVELSafeHelper.getEvaluator().eval(sourceParam, new NodeInstanceResolverFactory(this));
                } catch (Throwable t) {
                    logger.warn("Could not find variable scope for variable {}", sourceParam);
                }
            }
            if (parameterValue != null) {
                parameters.put(association.getTarget(), parameterValue);
            }
        }

        return parameters;
    }
}
| |
// Copyright 2019 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Note that the Go client is the reference client implementation for the waterfall service
package com.google.waterfall.client;
import com.google.common.base.Preconditions;
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.common.util.concurrent.SettableFuture;
import com.google.protobuf.ByteString;
import com.google.waterfall.WaterfallGrpc;
import com.google.waterfall.WaterfallGrpc.WaterfallStub;
import com.google.waterfall.WaterfallProto.Cmd;
import com.google.waterfall.WaterfallProto.CmdProgress;
import com.google.waterfall.WaterfallProto.Transfer;
import com.google.waterfall.tar.Tar;
import io.grpc.ManagedChannel;
import io.grpc.ManagedChannelBuilder;
import io.grpc.stub.StreamObserver;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
/**
* Client for the waterfall service using gRPC. Executes asynchronously.
* */
public class WaterfallClient {
  private static final int PIPE_BUFFER_SIZE = 256 * 1024;
  private static final int SHUTDOWN_TIMEOUT_SECONDS = 1;

  private final WaterfallStub asyncStub;
  private final ManagedChannel channel;
  private final ListeningExecutorService executorService;
  private final boolean shouldCleanupExecutorService;

  /** Consumes the tar stream received from the device during a pull. */
  @FunctionalInterface
  private interface UntarSink {
    void untar(PipedInputStream input) throws IOException;
  }

  /** Produces the tar stream that is sent to the device during a push. */
  @FunctionalInterface
  private interface TarSource {
    void tar(PipedOutputStream output) throws IOException;
  }

  /** @param channelBuilder channelBuilder initialized with the server's settings. */
  private WaterfallClient(
      ManagedChannelBuilder<?> channelBuilder,
      ListeningExecutorService executorService,
      boolean shouldCleanupExecutorService) {
    this.channel = channelBuilder.build();
    this.asyncStub = WaterfallGrpc.newStub(channel);
    this.executorService = executorService;
    this.shouldCleanupExecutorService = shouldCleanupExecutorService;
  }

  /**
   * Factory for WaterfallClient builders.
   *
   * @return A new WaterfallClient builder
   */
  public static Builder newBuilder() {
    return new Builder();
  }

  /** Builder for WaterfallClient. */
  public static class Builder {
    private ManagedChannelBuilder<?> channelBuilder;
    private ListeningExecutorService executorService;
    private boolean shouldCleanupExecutorService = true;

    /**
     * Returns same builder caller.
     *
     * @param channelBuilder channelBuilder initialized with the server's settings.
     */
    public Builder withChannelBuilder(ManagedChannelBuilder<?> channelBuilder) {
      // Don't realize the channel just yet. Wait until instance creation so we can safely pass the
      // builder object in instances where the server is not running. This is mostly useful
      // for Guice.
      this.channelBuilder = channelBuilder;
      return this;
    }

    /**
     * Supplies an externally managed executor; the client will NOT shut it down on
     * {@link WaterfallClient#shutdown()}.
     */
    public Builder withListeningExecutorService(ListeningExecutorService executorService) {
      Preconditions.checkArgument(!executorService.isShutdown());
      this.executorService = executorService;
      shouldCleanupExecutorService = false;
      return this;
    }

    /** Returns WaterfallClient with channel initialized. */
    public WaterfallClient build() {
      Objects.requireNonNull(
          channelBuilder, "Must specify non-null arg to withChannelBuilder before building.");
      if (this.executorService == null) {
        // Default executor is owned by the client and cleaned up in shutdown().
        this.executorService = MoreExecutors.listeningDecorator(Executors.newCachedThreadPool());
      }
      return new WaterfallClient(channelBuilder, executorService, shouldCleanupExecutorService);
    }
  }

  /**
   * Pulls the specified file/dir from src into dst.
   *
   * @param src Absolute path to source file on device
   * @param dst Absolute path to destination directory on host using location file system
   */
  public ListenableFuture<Void> pull(String src, String dst) {
    return this.pull(src, Paths.get(dst));
  }

  /**
   * Pulls the specified file/dir from src into dst.
   *
   * @param src Absolute path to source file on device
   * @param dst Absolute path to destination directory on host using location file system
   */
  public ListenableFuture<Void> pull(String src, Path dst) {
    return pullAndUntar(
        src,
        input -> Tar.untar(input, dst.toString()),
        "Unable to pull src files/dirs from device.");
  }

  /**
   * Pulls the specified file from src into output stream. Only a single src file is accepted
   *
   * @param src Absolute path to source file on device. This should point to an existing file on
   *     the device, that is not a symlink or a directory.
   * @param out Output stream where the contents of src files will be written to.
   */
  public ListenableFuture<Void> pullFile(String src, OutputStream out) {
    return pullAndUntar(
        src, input -> Tar.untarFile(input, out), "Unable to pull src file from device.");
  }

  /**
   * Shared pull pipeline: streams the device-side tar through a pipe and untars it on an executor
   * thread, cancelling the untar task if the gRPC pull fails.
   *
   * @param src absolute device path to pull
   * @param sink consumes the tar stream (directory untar or single-file untar)
   * @param errorMessage message used if the pipe cannot be created
   */
  private ListenableFuture<Void> pullAndUntar(String src, UntarSink sink, String errorMessage) {
    try {
      PipedInputStream input = new PipedInputStream(PIPE_BUFFER_SIZE);
      PipedOutputStream output = new PipedOutputStream(input);
      final SettableFuture<Void> future = SettableFuture.create();
      pullFromWaterfall(src, output, future);
      final ListenableFuture<Void> untarFuture =
          executorService.submit(
              () -> {
                try {
                  sink.untar(input);
                  future.set(null);
                } catch (IOException e) {
                  future.setException(e);
                } finally {
                  try {
                    output.close();
                  } catch (IOException e) {
                    future.setException(e);
                  }
                }
                return null;
              });
      // Cancel running untar if there was an exception in pulling file from waterfall.
      cancelOnFailure(future, untarFuture);
      return future;
    } catch (IOException e) {
      throw new WaterfallRuntimeException(errorMessage, e);
    }
  }

  /**
   * Issues the gRPC pull and copies each received payload chunk into {@code output}; completes or
   * fails {@code future} when the stream ends.
   */
  private void pullFromWaterfall(String src, OutputStream output, SettableFuture<Void> future) {
    Transfer transfer = Transfer.newBuilder().setPath(src).build();
    StreamObserver<Transfer> responseObserver =
        new StreamObserver<Transfer>() {
          @Override
          public void onNext(Transfer value) {
            try {
              value.getPayload().writeTo(output);
            } catch (IOException e) {
              onError(new WaterfallRuntimeException("Unable to pull file(s) from device.", e));
            }
          }

          @Override
          public void onError(Throwable t) {
            future.setException(t);
          }

          @Override
          public void onCompleted() {
            // Closing the pipe signals EOF to the untar side.
            try {
              output.close();
            } catch (IOException e) {
              onError(e);
            }
          }
        };
    asyncStub.pull(transfer, responseObserver);
  }

  /**
   * Push the specified file/dir from src into dst.
   *
   * @param src Absolute path to source file on host using local filesystem
   * @param dst Absolute path to destination on device
   */
  public Future<Void> push(String src, String dst) {
    return this.push(Paths.get(src), dst);
  }

  /**
   * Push the specified file/dir from src into dst.
   *
   * @param src Absolute path to source file on host using local filesystem.
   * @param dst Absolute path to destination on device
   */
  public Future<Void> push(Path src, String dst) {
    return tarAndPush(
        output -> Tar.tar(src.toString(), output), dst, "Unable to push file(s) into device");
  }

  /**
   * Push a byte array into destination file onto device.
   *
   * @param src byte array of a single file content to be transferred to device.
   * @param dst Absolute path to destination on device
   */
  public Future<Void> pushBytes(byte[] src, String dst) {
    return tarAndPush(output -> Tar.tarFile(src, output), dst, "Unable to push bytes into device");
  }

  /**
   * Shared push pipeline: tars the source into a pipe on an executor thread while the gRPC push
   * streams the pipe's content to the device.
   *
   * @param source produces the tar stream (directory tar or single-buffer tar)
   * @param dst absolute destination path on device
   * @param errorMessage message used if the pipe cannot be created
   */
  private Future<Void> tarAndPush(TarSource source, String dst, String errorMessage) {
    try {
      PipedInputStream input = new PipedInputStream(PIPE_BUFFER_SIZE);
      PipedOutputStream output = new PipedOutputStream(input);
      final SettableFuture<Void> future = SettableFuture.create();
      ListenableFuture<Void> unusedTarFuture =
          executorService.submit(
              () -> {
                try {
                  source.tar(output);
                } catch (IOException e) {
                  future.setException(e);
                } finally {
                  // Always close so the reading side sees EOF even on failure.
                  try {
                    output.close();
                  } catch (IOException e) {
                    future.setException(e);
                  }
                }
                return null;
              });
      pushToWaterfall(input, dst, future);
      return future;
    } catch (IOException e) {
      throw new WaterfallRuntimeException(errorMessage, e);
    }
  }

  /**
   * Streams {@code in} to the device as push payload chunks; completes {@code future} when the
   * server acknowledges the stream.
   */
  private void pushToWaterfall(InputStream in, String dst, SettableFuture<Void> future) {
    StreamObserver<Transfer> responseObserver =
        new StreamObserver<Transfer>() {
          @Override
          public void onNext(Transfer transfer) {
            // We don't expect any incoming messages when pushing.
          }

          @Override
          public void onError(Throwable t) {
            future.setException(t);
          }

          @Override
          public void onCompleted() {
            future.set(null);
          }
        };
    StreamObserver<Transfer> requestObserver = asyncStub.push(responseObserver);
    // First message carries only the destination path; payload chunks follow.
    requestObserver.onNext(Transfer.newBuilder().setPath(dst).build());
    final ListenableFuture<?> transferFuture =
        executorService.submit(
            () -> {
              try {
                byte[] buff = new byte[PIPE_BUFFER_SIZE];
                // Stop early if the transfer already failed/completed elsewhere.
                while (!future.isDone()) {
                  int r = in.read(buff);
                  if (r == -1) {
                    break;
                  }
                  requestObserver.onNext(
                      Transfer.newBuilder().setPayload(ByteString.copyFrom(buff, 0, r)).build());
                }
                in.close();
                requestObserver.onCompleted();
              } catch (IOException e) {
                requestObserver.onError(e);
                future.setException(e);
              }
            });
    // Cancel the copy task if the overall push fails.
    cancelOnFailure(future, transferFuture);
  }

  /** Cancels {@code toCancel} (interrupting if running) when {@code watched} fails. */
  private static void cancelOnFailure(ListenableFuture<Void> watched, Future<?> toCancel) {
    Futures.addCallback(
        watched,
        new FutureCallback<Void>() {
          @Override
          public void onSuccess(Void result) {}

          @Override
          public void onFailure(Throwable t) {
            toCancel.cancel(true);
          }
        },
        MoreExecutors.directExecutor());
  }

  /**
   * Executes a command on the device.
   *
   * @param command executable command on device
   * @param args args list for executable command on device
   * @param input stdin input for executable command on device
   * @param stdout captures any standard output from executing command on device.
   * @param stderr captures any standard error from executing command on device.
   */
  public ListenableFuture<CmdProgress> exec(
      String command, List<String> args, String input, OutputStream stdout, OutputStream stderr) {
    try {
      return execChecked(command, args, input, stdout, stderr);
    } catch (Exception e) {
      throw new WaterfallRuntimeException("Exception running waterfall exec command", e);
    }
  }

  /**
   * Implementation of {@link #exec}: forwards stdout/stderr chunks as they arrive and resolves the
   * returned future with the final {@link CmdProgress} message (which carries the exit status).
   */
  private ListenableFuture<CmdProgress> execChecked(
      String command, List<String> args, String input, OutputStream stdout, OutputStream stderr) {
    final SettableFuture<CmdProgress> result = SettableFuture.create();
    StreamObserver<CmdProgress> responseObserver =
        new StreamObserver<CmdProgress>() {
          // Last message seen; delivered as the future's value on completion.
          private CmdProgress last = null;

          @Override
          public void onNext(CmdProgress cmdProgress) {
            try {
              cmdProgress.getStdout().writeTo(stdout);
              cmdProgress.getStderr().writeTo(stderr);
              last = cmdProgress;
            } catch (IOException e) {
              onError(e);
            }
          }

          @Override
          public void onError(Throwable t) {
            result.setException(t);
          }

          @Override
          public void onCompleted() {
            result.set(last);
          }
        };
    StreamObserver<CmdProgress> requestObserver = asyncStub.exec(responseObserver);
    try {
      requestObserver.onNext(
          CmdProgress.newBuilder()
              .setCmd(Cmd.newBuilder().setPath(command).addAllArgs(args).setPipeIn(input != null))
              .build());
      if (input != null) {
        requestObserver.onNext(
            CmdProgress.newBuilder().setStdin(ByteString.copyFromUtf8(input)).build());
      }
      requestObserver.onCompleted();
    } catch (Exception e) {
      requestObserver.onError(e);
      result.setException(e);
    }
    return result;
  }

  /**
   * Cleans up client. Times out if channel termination takes too long.
   *
   * @throws InterruptedException Thrown by channel cleanup.
   */
  public void shutdown() throws InterruptedException {
    channel.shutdown().awaitTermination(SHUTDOWN_TIMEOUT_SECONDS, TimeUnit.SECONDS);
    if (shouldCleanupExecutorService) {
      executorService.shutdown();
    }
  }

  /** Generic runtime exception thrown by WaterfallClient. */
  public static final class WaterfallRuntimeException extends RuntimeException {
    WaterfallRuntimeException(String msg, Throwable cause) {
      super(msg, cause);
    }
  }
}
| |
package org.jcodec.containers.mkv.boxes;
import static java.lang.System.arraycopy;
import static org.jcodec.containers.mkv.boxes.EbmlSint.convertToBytes;
import static org.jcodec.containers.mkv.boxes.EbmlSint.signedComplement;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.util.Arrays;
import org.jcodec.common.ByteArrayList;
import org.jcodec.common.io.SeekableByteChannel;
import org.jcodec.containers.mkv.util.EbmlUtil;
import org.jcodec.platform.Platform;
/**
* This class is part of JCodec ( www.jcodec.org ) This software is distributed
* under FreeBSD License
*
* @author The JCodec project
*
*/
public class MkvBlock extends EbmlBin {
private static final String XIPH = "Xiph";
private static final String EBML = "EBML";
private static final String FIXED = "Fixed";
@SuppressWarnings("unused")
private static final int MAX_BLOCK_HEADER_SIZE = 512;
public int[] frameOffsets;
public int[] frameSizes;
public long trackNumber;
public int timecode;
public long absoluteTimecode;
public boolean _keyFrame;
public int headerSize;
public String lacing;
public boolean discardable;
public boolean lacingPresent;
public ByteBuffer[] frames;
public static final byte[] BLOCK_ID = new byte[]{(byte)0xA1};
public static final byte[] SIMPLEBLOCK_ID = new byte[]{(byte)0xA3};
public static MkvBlock copy(MkvBlock old) {
MkvBlock be = new MkvBlock(old.id);
be.trackNumber = old.trackNumber;
be.timecode = old.timecode;
be.absoluteTimecode = old.absoluteTimecode;
be._keyFrame = old._keyFrame;
be.headerSize = old.headerSize;
be.lacing = old.lacing;
be.discardable = old.discardable;
be.lacingPresent = old.lacingPresent;
be.frameOffsets = new int[old.frameOffsets.length];
be.frameSizes = new int[old.frameSizes.length];
be.dataOffset = old.dataOffset;
be.offset = old.offset;
be.type = old.type;
arraycopy(old.frameOffsets, 0, be.frameOffsets, 0, be.frameOffsets.length);
arraycopy(old.frameSizes, 0, be.frameSizes, 0, be.frameSizes.length);
return be;
}
public static MkvBlock keyFrame(long trackNumber, int timecode, ByteBuffer frame) {
MkvBlock be = new MkvBlock(SIMPLEBLOCK_ID);
be.frames = new ByteBuffer[] { frame };
be.frameSizes = new int[] { frame.limit() };
be._keyFrame = true;
be.trackNumber = trackNumber;
be.timecode = timecode;
return be;
}
    /**
     * Creates a block for the given EBML element id.
     *
     * @param type element id bytes; must be either {@link #BLOCK_ID} or {@link #SIMPLEBLOCK_ID}
     * @throws IllegalArgumentException if the id is neither Block nor SimpleBlock
     */
    public MkvBlock(byte[] type) {
        super(type);
        if (!Platform.arrayEqualsByte(SIMPLEBLOCK_ID, type) && !Platform.arrayEqualsByte(BLOCK_ID, type))
            throw new IllegalArgumentException("Block initiated with invalid id: " + EbmlUtil.toHexString(type));
    }
@Override
public void readChannel(SeekableByteChannel is) throws IOException {
ByteBuffer bb = ByteBuffer.allocate((int) 100);
is.read(bb);
bb.flip();
this.read(bb);
is.setPosition(this.dataOffset+this.dataLen);
}
@Override
public void read(ByteBuffer source) {
ByteBuffer bb = source.slice();
trackNumber = MkvBlock.ebmlDecode(bb);
int tcPart1 = bb.get() & 0xFF;
int tcPart2 = bb.get() & 0xFF;
timecode = (short) (((short) tcPart1 << 8) | (short) tcPart2);
int flags = bb.get() & 0xFF;
_keyFrame = (flags & 0x80) > 0;
discardable = (flags & 0x01) > 0;
int laceFlags = flags & 0x06;
lacingPresent = laceFlags != 0x00;
if (lacingPresent) {
int lacesCount = bb.get() & 0xFF;
frameSizes = new int[lacesCount + 1];
if (laceFlags == 0x02) {
/* Xiph */
lacing = XIPH;
headerSize = readXiphLaceSizes(bb, frameSizes, (int) this.dataLen, bb.position());
} else if (laceFlags == 0x06) {
/* EBML */
lacing = EBML;
headerSize = readEBMLLaceSizes(bb, frameSizes, (int) this.dataLen, bb.position());
} else if (laceFlags == 0x04) {
/* Fixed Size Lacing */
this.lacing = FIXED;
this.headerSize = bb.position();
int aLaceSize = (int) ((this.dataLen - this.headerSize) / (lacesCount + 1));
Arrays.fill(frameSizes, aLaceSize);
} else {
throw new RuntimeException("Unsupported lacing type flag.");
}
turnSizesToFrameOffsets(frameSizes);
} else {
this.lacing = "";
int frameOffset = bb.position();
frameOffsets = new int[1];
frameOffsets[0] = frameOffset;
headerSize = bb.position();
frameSizes = new int[1];
frameSizes[0] = (int) (this.dataLen - headerSize);
}
}
private void turnSizesToFrameOffsets(int[] sizes) {
frameOffsets = new int[sizes.length];
frameOffsets[0] = headerSize;
for (int i = 1; i < sizes.length; i++)
frameOffsets[i] = frameOffsets[i - 1] + sizes[i - 1];
}
public static int readXiphLaceSizes(ByteBuffer bb, int[] sizes, int size, int preLacingHeaderSize) {
int startPos = bb.position();
int lastIndex = sizes.length - 1;
sizes[lastIndex] = size;
for (int l = 0; l < lastIndex; l++) {
int laceSize = 255;
while (laceSize == 255) {
laceSize = bb.get() & 0xFF;
sizes[l] += laceSize;
}
// Update the size of the last block
sizes[lastIndex] -= sizes[l];
}
int headerSize = (bb.position() - startPos) + preLacingHeaderSize;
sizes[lastIndex] -= headerSize;
return headerSize;
}
public static int readEBMLLaceSizes(ByteBuffer source, int[] sizes, int size, int preLacingHeaderSize) {
int lastIndex = sizes.length - 1;
sizes[lastIndex] = size;
int startPos = source.position();
sizes[0] = (int) MkvBlock.ebmlDecode(source);
sizes[lastIndex] -= sizes[0];
int laceSize = sizes[0];
long laceSizeDiff = 0;
for (int l = 1; l < lastIndex; l++) {
laceSizeDiff = MkvBlock.ebmlDecodeSigned(source);
laceSize += laceSizeDiff;
sizes[l] = laceSize;
// Update the size of the last block
sizes[lastIndex] -= sizes[l];
}
int headerSize = (source.position() - startPos) + preLacingHeaderSize;
sizes[lastIndex] -= headerSize;
return headerSize;
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{dataOffset: ").append(dataOffset);
sb.append(", trackNumber: ").append(trackNumber);
sb.append(", timecode: ").append(timecode);
sb.append(", keyFrame: ").append(_keyFrame);
sb.append(", headerSize: ").append(headerSize);
sb.append(", lacing: ").append(lacing);
for (int i = 0; i < frameSizes.length; i++)
sb.append(", frame[").append(i).append("] offset ").append(frameOffsets[i]).append(" size ").append(frameSizes[i]);
sb.append(" }");
return sb.toString();
}
public ByteBuffer[] getFrames(ByteBuffer source) throws IOException {
ByteBuffer[] frames = new ByteBuffer[frameSizes.length];
for (int i = 0; i < frameSizes.length; i++) {
if (frameOffsets[i] > source.limit())
System.err.println("frame offset: " + frameOffsets[i] + " limit: " + source.limit());
source.position(frameOffsets[i]);
ByteBuffer bb = source.slice();
bb.limit(frameSizes[i]);
frames[i] = bb;
}
return frames;
}
public void readFrames(ByteBuffer source) throws IOException {
this.frames = getFrames(source);
}
// @Override
public ByteBuffer getData() {
int dataSize = (int) getDataSize();
ByteBuffer bb = ByteBuffer.allocate(dataSize + EbmlUtil.ebmlLength(dataSize) + id.length);
bb.put(id);
bb.put(EbmlUtil.ebmlEncode(dataSize));
bb.put(EbmlUtil.ebmlEncode(trackNumber));
bb.put((byte) ((timecode >>> 8) & 0xFF));
bb.put((byte) (timecode & 0xFF));
byte flags = 0x00;
if (XIPH.equals(lacing)) {
flags = 0x02;
} else if (EBML.equals(lacing)) {
flags = 0x06;
} else if (FIXED.equals(lacing)) {
flags = 0x04;
}
if (discardable)
flags |= 0x01;
if (_keyFrame)
flags |= 0x80;
bb.put(flags);
if ((flags & 0x06) != 0) {
bb.put((byte) ((frames.length - 1) & 0xFF));
bb.put(muxLacingInfo());
}
for (int i = 0; i < frames.length; i++) {
ByteBuffer frame = frames[i];
bb.put(frame);
}
bb.flip();
return bb;
}
public void seekAndReadContent(FileChannel source) throws IOException {
data = ByteBuffer.allocate((int) dataLen);
source.position(dataOffset);
source.read(data);
this.data.flip();
}
/**
* Get the total size of this element
*/
@Override
public long size() {
long size = getDataSize();
size += EbmlUtil.ebmlLength(size);
size += id.length;
return size;
}
public int getDataSize() {
int size = 0;
// TODO: one can do same calculation with for(byte[] aFrame : this.frames) size += aFrame.length;
for (long fsize : frameSizes)
size += fsize;
if (lacingPresent) {
size += muxLacingInfo().length;
size += 1; // int8 laces count, a.k.a. frame_count-1
}
size += 3; // int8 - flags; sint16 - timecode
size += EbmlUtil.ebmlLength(trackNumber);
return size;
}
private byte[] muxLacingInfo() {
if (EBML.equals(lacing))
return muxEbmlLacing(frameSizes);
if (XIPH.equals(lacing))
return muxXiphLacing(frameSizes);
if (FIXED.equals(lacing))
return new byte[0];
return null;
}
static public long ebmlDecode(ByteBuffer bb) {
byte firstByte = bb.get();
int length = EbmlUtil.computeLength(firstByte);
if (length == 0)
throw new RuntimeException("Invalid ebml integer size.");
long value = firstByte & (0xFF >>> length);
length--;
while (length > 0) {
value = (value << 8) | (bb.get() & 0xff);
length--;
}
return value;
}
static public long ebmlDecodeSigned(ByteBuffer source) {
byte firstByte = source.get();
int size = EbmlUtil.computeLength(firstByte);
if (size == 0)
throw new RuntimeException("Invalid ebml integer size.");
long value = firstByte & (0xFF >>> size);
int remaining = size-1;
while (remaining > 0){
value = (value << 8) | (source.get() & 0xff);
remaining--;
}
return value - signedComplement[size];
}
public static long[] calcEbmlLacingDiffs(int[] laceSizes) {
int lacesCount = laceSizes.length - 1;
long[] out = new long[lacesCount];
out[0] = (int) laceSizes[0];
for (int i = 1; i < lacesCount; i++) {
out[i] = laceSizes[i] - laceSizes[i - 1];
}
return out;
}
public static byte[] muxEbmlLacing(int[] laceSizes) {
ByteArrayList bytes = ByteArrayList.createByteArrayList();
long[] laceSizeDiffs = calcEbmlLacingDiffs(laceSizes);
bytes.addAll(EbmlUtil.ebmlEncode(laceSizeDiffs[0]));
for (int i = 1; i < laceSizeDiffs.length; i++) {
bytes.addAll(convertToBytes(laceSizeDiffs[i]));
}
return bytes.toArray();
}
public static byte[] muxXiphLacing(int[] laceSizes) {
ByteArrayList bytes = ByteArrayList.createByteArrayList();
for (int i = 0; i < laceSizes.length - 1; i++) {
long laceSize = laceSizes[i];
while (laceSize >= 255) {
bytes.add((byte) 255);
laceSize -= 255;
}
bytes.add((byte) laceSize);
}
return bytes.toArray();
}
}
| |
/*
* Copyright 2011 Noa Resare
* Copyright 2014 Andreas Schildbach
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bitcoinj.core;
import org.bitcoinj.params.BitcoinMainNetParams;
import org.junit.Test;
import java.io.ByteArrayOutputStream;
import java.nio.BufferUnderflowException;
import java.nio.ByteBuffer;
import java.util.Arrays;
import static org.bitcoinj.core.Utils.HEX;
import static org.junit.Assert.*;
/**
 * Tests for the bitcoinj message serializer: round-tripping of addr,
 * tx and headers messages against captured mainnet wire bytes, cache
 * invalidation on mutation, and packet-header error handling.
 */
public class BitcoinSerializerTest {
    // Captured mainnet "addr" message (magic f9beb4d9 + header + one peer address).
    private static final byte[] ADDRESS_MESSAGE_BYTES = HEX.decode("f9beb4d96164647200000000000000001f000000" +
            "ed52399b01e215104d010000000000000000000000000000000000ffff0a000001208d");
    // Captured mainnet "tx" message: one input, two outputs.
    private static final byte[] TRANSACTION_MESSAGE_BYTES = HEX.withSeparator(" ", 2).decode(
            "f9 be b4 d9 74 78 00 00 00 00 00 00 00 00 00 00" +
            "02 01 00 00 e2 93 cd be 01 00 00 00 01 6d bd db" +
            "08 5b 1d 8a f7 51 84 f0 bc 01 fa d5 8d 12 66 e9" +
            "b6 3b 50 88 19 90 e4 b4 0d 6a ee 36 29 00 00 00" +
            "00 8b 48 30 45 02 21 00 f3 58 1e 19 72 ae 8a c7" +
            "c7 36 7a 7a 25 3b c1 13 52 23 ad b9 a4 68 bb 3a" +
            "59 23 3f 45 bc 57 83 80 02 20 59 af 01 ca 17 d0" +
            "0e 41 83 7a 1d 58 e9 7a a3 1b ae 58 4e de c2 8d" +
            "35 bd 96 92 36 90 91 3b ae 9a 01 41 04 9c 02 bf" +
            "c9 7e f2 36 ce 6d 8f e5 d9 40 13 c7 21 e9 15 98" +
            "2a cd 2b 12 b6 5d 9b 7d 59 e2 0a 84 20 05 f8 fc" +
            "4e 02 53 2e 87 3d 37 b9 6f 09 d6 d4 51 1a da 8f" +
            "14 04 2f 46 61 4a 4c 70 c0 f1 4b ef f5 ff ff ff" +
            "ff 02 40 4b 4c 00 00 00 00 00 19 76 a9 14 1a a0" +
            "cd 1c be a6 e7 45 8a 7a ba d5 12 a9 d9 ea 1a fb" +
            "22 5e 88 ac 80 fa e9 c7 00 00 00 00 19 76 a9 14" +
            "0e ab 5b ea 43 6a 04 84 cf ab 12 48 5e fd a0 b7" +
            "8b 4e cc 52 88 ac 00 00 00 00");
    /** Deserializes a captured addr message and checks the decoded peer address. */
    @Test
    public void testAddr() throws Exception {
        final NetworkParameters params = BitcoinMainNetParams.get();
        MessageSerializer serializer = params.getDefaultSerializer();
        // the actual data from https://en.bitcoin.it/wiki/Protocol_specification#addr
        AddressMessage addressMessage = (AddressMessage) serializer.deserialize(ByteBuffer.wrap(ADDRESS_MESSAGE_BYTES));
        assertEquals(1, addressMessage.getAddresses().size());
        PeerAddress peerAddress = addressMessage.getAddresses().get(0);
        assertEquals(8333, peerAddress.getPort());
        assertEquals("10.0.0.1", peerAddress.getAddr().getHostAddress());
        ByteArrayOutputStream bos = new ByteArrayOutputStream(ADDRESS_MESSAGE_BYTES.length);
        serializer.serialize(addressMessage, bos);
        assertEquals(31, addressMessage.getMessageSize());
        //this wont be true due to dynamic timestamps.
        //assertTrue(LazyParseByteCacheTest.arrayContains(bos.toByteArray(), addrMessage));
    }
    /**
     * Verifies the lazy-parse cache: mutating a parent or child uncaches
     * exactly the affected objects, and an untouched round trip reproduces
     * the original bytes. Assertion order matters — each mutation builds on
     * a freshly deserialized transaction.
     */
    @Test
    public void testCachedParsing() throws Exception {
        MessageSerializer serializer = BitcoinMainNetParams.get().getSerializer(true);
        // first try writing to a fields to ensure uncaching and children are not affected
        Transaction transaction = (Transaction) serializer.deserialize(ByteBuffer.wrap(TRANSACTION_MESSAGE_BYTES));
        assertNotNull(transaction);
        assertTrue(transaction.isCached());
        transaction.setLockTime(1);
        // parent should have been uncached
        assertFalse(transaction.isCached());
        // child should remain cached.
        assertTrue(transaction.getInputs().get(0).isCached());
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        serializer.serialize(transaction, bos);
        assertFalse(Arrays.equals(TRANSACTION_MESSAGE_BYTES, bos.toByteArray()));
        // now try writing to a child to ensure uncaching is propagated up to parent but not to siblings
        transaction = (Transaction) serializer.deserialize(ByteBuffer.wrap(TRANSACTION_MESSAGE_BYTES));
        assertNotNull(transaction);
        assertTrue(transaction.isCached());
        transaction.getInputs().get(0).setSequenceNumber(1);
        // parent should have been uncached
        assertFalse(transaction.isCached());
        // so should child
        assertFalse(transaction.getInputs().get(0).isCached());
        bos = new ByteArrayOutputStream();
        serializer.serialize(transaction, bos);
        assertFalse(Arrays.equals(TRANSACTION_MESSAGE_BYTES, bos.toByteArray()));
        // deserialize/reserialize to check for equals.
        transaction = (Transaction) serializer.deserialize(ByteBuffer.wrap(TRANSACTION_MESSAGE_BYTES));
        assertNotNull(transaction);
        assertTrue(transaction.isCached());
        bos = new ByteArrayOutputStream();
        serializer.serialize(transaction, bos);
        assertTrue(Arrays.equals(TRANSACTION_MESSAGE_BYTES, bos.toByteArray()));
        // deserialize/reserialize to check for equals. Set a field to it's existing value to trigger uncache
        transaction = (Transaction) serializer.deserialize(ByteBuffer.wrap(TRANSACTION_MESSAGE_BYTES));
        assertNotNull(transaction);
        assertTrue(transaction.isCached());
        transaction.getInputs().get(0).setSequenceNumber(transaction.getInputs().get(0).getSequenceNumber());
        bos = new ByteArrayOutputStream();
        serializer.serialize(transaction, bos);
        assertTrue(Arrays.equals(TRANSACTION_MESSAGE_BYTES, bos.toByteArray()));
    }
    /**
     * Get 1 header of the block number 1 (the first one is 0) in the chain
     */
    @Test
    public void testHeaders1() throws Exception {
        MessageSerializer serializer = BitcoinMainNetParams.get().getDefaultSerializer();
        byte[] headersMessageBytes = HEX.decode("f9beb4d9686561" +
                "646572730000000000520000005d4fab8101010000006fe28c0ab6f1b372c1a6a246ae6" +
                "3f74f931e8365e15a089c68d6190000000000982051fd1e4ba744bbbe680e1fee14677b" +
                "a1a3c3540bf7b1cdb606e857233e0e61bc6649ffff001d01e3629900");
        HeadersMessage headersMessage = (HeadersMessage) serializer.deserialize(ByteBuffer.wrap(headersMessageBytes));
        // The first block after the genesis
        // http://blockexplorer.com/b/1
        Block block = headersMessage.getBlockHeaders().get(0);
        assertEquals("00000000839a8e6886ab5951d76f411475428afc90947ee320161bbf18eb6048", block.getHashAsString());
        assertNotNull(block.transactions);
        assertEquals("0e3e2357e806b6cdb1f70b54c3a3a17b6714ee1f0e68bebb44a74b1efd512098", Utils.HEX.encode(block.getMerkleRoot().getBytes()));
        // Round trip: reserializing the parsed message must reproduce the input bytes.
        ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
        serializer.serialize(headersMessage, byteArrayOutputStream);
        byte[] serializedBytes = byteArrayOutputStream.toByteArray();
        assertArrayEquals(headersMessageBytes, serializedBytes);
    }
    /**
     * Get 6 headers of blocks 1-6 in the chain
     */
    @Test
    public void testHeaders2() throws Exception {
        MessageSerializer serializer = BitcoinMainNetParams.get().getDefaultSerializer();
        byte[] headersMessageBytes = HEX.decode("f9beb4d96865616465" +
                "72730000000000e701000085acd4ea06010000006fe28c0ab6f1b372c1a6a246ae63f74f931e" +
                "8365e15a089c68d6190000000000982051fd1e4ba744bbbe680e1fee14677ba1a3c3540bf7b1c" +
                "db606e857233e0e61bc6649ffff001d01e3629900010000004860eb18bf1b1620e37e9490fc8a" +
                "427514416fd75159ab86688e9a8300000000d5fdcc541e25de1c7a5addedf24858b8bb665c9f36" +
                "ef744ee42c316022c90f9bb0bc6649ffff001d08d2bd610001000000bddd99ccfda39da1b108ce1" +
                "a5d70038d0a967bacb68b6b63065f626a0000000044f672226090d85db9a9f2fbfe5f0f9609b387" +
                "af7be5b7fbb7a1767c831c9e995dbe6649ffff001d05e0ed6d00010000004944469562ae1c2c74" +
                "d9a535e00b6f3e40ffbad4f2fda3895501b582000000007a06ea98cd40ba2e3288262b28638cec" +
                "5337c1456aaf5eedc8e9e5a20f062bdf8cc16649ffff001d2bfee0a9000100000085144a84488e" +
                "a88d221c8bd6c059da090e88f8a2c99690ee55dbba4e00000000e11c48fecdd9e72510ca84f023" +
                "370c9a38bf91ac5cae88019bee94d24528526344c36649ffff001d1d03e4770001000000fc33f5" +
                "96f822a0a1951ffdbf2a897b095636ad871707bf5d3162729b00000000379dfb96a5ea8c81700ea4" +
                "ac6b97ae9a9312b2d4301a29580e924ee6761a2520adc46649ffff001d189c4c9700");
        HeadersMessage headersMessage = (HeadersMessage) serializer.deserialize(ByteBuffer.wrap(headersMessageBytes));
        assertEquals(6, headersMessage.getBlockHeaders().size());
        // index 0 block is the number 1 block in the block chain
        // http://blockexplorer.com/b/1
        Block zeroBlock = headersMessage.getBlockHeaders().get(0);
        assertEquals("00000000839a8e6886ab5951d76f411475428afc90947ee320161bbf18eb6048",
                zeroBlock.getHashAsString());
        assertEquals(2573394689L, zeroBlock.getNonce());
        // index 3 block is the number 4 block in the block chain
        // http://blockexplorer.com/b/4
        Block thirdBlock = headersMessage.getBlockHeaders().get(3);
        assertEquals("000000004ebadb55ee9096c9a2f8880e09da59c0d68b1c228da88e48844a1485",
                thirdBlock.getHashAsString());
        assertEquals(2850094635L, thirdBlock.getNonce());
        // Round trip: reserializing the parsed message must reproduce the input bytes.
        ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
        serializer.serialize(headersMessage, byteArrayOutputStream);
        byte[] serializedBytes = byteArrayOutputStream.toByteArray();
        assertArrayEquals(headersMessageBytes, serializedBytes);
    }
    /** A single byte is too short for a packet header. */
    @Test(expected = BufferUnderflowException.class)
    public void testBitcoinPacketHeaderTooShort() {
        new BitcoinSerializer.BitcoinPacketHeader(ByteBuffer.wrap(new byte[] { 0 }));
    }
    /** A declared payload size over the maximum must be rejected. */
    @Test(expected = ProtocolException.class)
    public void testBitcoinPacketHeaderTooLong() {
        // Message with a Message size which is 1 too big, in little endian format.
        byte[] wrongMessageLength = HEX.decode("000000000000000000000000010000020000000000");
        new BitcoinSerializer.BitcoinPacketHeader(ByteBuffer.wrap(wrongMessageLength));
    }
    /** Seeking for magic bytes in a magic-less stream must underflow. */
    @Test(expected = BufferUnderflowException.class)
    public void testSeekPastMagicBytes() {
        // Fail in another way, there is data in the stream but no magic bytes.
        byte[] brokenMessage = HEX.decode("000000");
        BitcoinMainNetParams.get().getDefaultSerializer().seekPastMagicBytes(ByteBuffer.wrap(brokenMessage));
    }
    /**
     * Tests serialization of an unknown message.
     */
    @Test(expected = Error.class)
    public void testSerializeUnknownMessage() throws Exception {
        MessageSerializer serializer = BitcoinMainNetParams.get().getDefaultSerializer();
        Message unknownMessage = new Message() {
            @Override
            protected void parse() throws ProtocolException {
            }
        };
        ByteArrayOutputStream bos = new ByteArrayOutputStream(ADDRESS_MESSAGE_BYTES.length);
        serializer.serialize(unknownMessage, bos);
    }
}
| |
/**
* Most of the code in the Qalingo project is copyrighted Hoteia and licensed
* under the Apache License Version 2.0 (release version 0.8.0)
* http://www.apache.org/licenses/LICENSE-2.0
*
* Copyright (c) Hoteia, 2012-2014
* http://www.hoteia.com - http://twitter.com/hoteia - contact@hoteia.com
*
*/
package org.hoteia.qalingo.core.domain;
import java.util.Arrays;
import java.util.Date;
import java.util.Objects;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.Lob;
import javax.persistence.ManyToOne;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import javax.persistence.Version;
@Entity
@Table(name="TECO_STORE_ATTRIBUTE")
/**
 * JPA entity holding one typed attribute value attached to a store.
 *
 * <p>Exactly one of the typed value columns (short/long string, integer,
 * double, float, blob, boolean) is expected to be populated, according to the
 * associated {@link AttributeDefinition}. Equality and hash code are based on
 * {@code id} and {@code dateCreate} only.
 */
@Entity
@Table(name="TECO_STORE_ATTRIBUTE")
public class StoreAttribute extends AbstractAttribute {
	/**
	 * Generated UID
	 */
	private static final long serialVersionUID = 5904256004222204269L;
	@Id
	@GeneratedValue(strategy = GenerationType.AUTO)
	@Column(name="ID", nullable=false)
	private Long id;
	@Version
	@Column(name="VERSION", nullable=false, columnDefinition="int(11) default 1")
	private int version;
	@ManyToOne(fetch = FetchType.EAGER)
	@JoinColumn(name="ATTRIBUTE_DEFINITION_ID", insertable=true, updatable=true)
	private AttributeDefinition attributeDefinition;
	@Column(name = "SHORT_STRING_VALUE")
	private String shortStringValue;
	@Column(name = "LONG_STRING_VALUE")
	@Lob
	private String longStringValue;
	@Column(name="INTEGER_VALUE")
	private Integer integerValue;
	@Column(name="DOUBLE_VALUE")
	private Double doubleValue;
	@Column(name="FLOAT_VALUE")
	private Float floatValue;
	@Column(name="BLOB_VALUE")
	@Lob
	private byte[] blobValue;
	@Column(name="BOOLEAN_VALUE")
	private Boolean booleanValue;
	@Column(name="LOCALIZATION_CODE")
	private String localizationCode;
	@Column(name="MARKET_AREA_ID")
	private Long marketAreaId;
	@Temporal(TemporalType.TIMESTAMP)
	@Column(name="START_DATE")
	private Date startDate;
	@Temporal(TemporalType.TIMESTAMP)
	@Column(name="END_DATE")
	private Date endDate;
	@Temporal(TemporalType.TIMESTAMP)
	@Column(name="DATE_CREATE")
	private Date dateCreate;
	@Temporal(TemporalType.TIMESTAMP)
	@Column(name="DATE_UPDATE")
	private Date dateUpdate;
	/** No-arg constructor required by JPA. */
	public StoreAttribute() {
	}
	public Long getId() {
		return id;
	}
	public void setId(Long id) {
		this.id = id;
	}
	public int getVersion() {
		return version;
	}
	public void setVersion(int version) {
		this.version = version;
	}
	public AttributeDefinition getAttributeDefinition() {
		return attributeDefinition;
	}
	public void setAttributeDefinition(AttributeDefinition attributeDefinition) {
		this.attributeDefinition = attributeDefinition;
	}
	public String getShortStringValue() {
		return shortStringValue;
	}
	public void setShortStringValue(String shortStringValue) {
		this.shortStringValue = shortStringValue;
	}
	public String getLongStringValue() {
		return longStringValue;
	}
	public void setLongStringValue(String longStringValue) {
		this.longStringValue = longStringValue;
	}
	public Integer getIntegerValue() {
		return integerValue;
	}
	public void setIntegerValue(Integer integerValue) {
		this.integerValue = integerValue;
	}
	public Double getDoubleValue() {
		return doubleValue;
	}
	public void setDoubleValue(Double doubleValue) {
		this.doubleValue = doubleValue;
	}
	public Float getFloatValue() {
		return floatValue;
	}
	public void setFloatValue(Float floatValue) {
		this.floatValue = floatValue;
	}
	public byte[] getBlobValue() {
		return blobValue;
	}
	public void setBlobValue(byte[] blobValue) {
		this.blobValue = blobValue;
	}
	public Boolean getBooleanValue() {
		return booleanValue;
	}
	public void setBooleanValue(Boolean booleanValue) {
		this.booleanValue = booleanValue;
	}
	public String getLocalizationCode() {
		return localizationCode;
	}
	public void setLocalizationCode(String localizationCode) {
		this.localizationCode = localizationCode;
	}
	public Long getMarketAreaId() {
		return marketAreaId;
	}
	public void setMarketAreaId(Long marketAreaId) {
		this.marketAreaId = marketAreaId;
	}
	public Date getStartDate() {
		return startDate;
	}
	public void setStartDate(Date startDate) {
		this.startDate = startDate;
	}
	public Date getEndDate() {
		return endDate;
	}
	public void setEndDate(Date endDate) {
		this.endDate = endDate;
	}
	public Date getDateCreate() {
		return dateCreate;
	}
	public void setDateCreate(Date dateCreate) {
		this.dateCreate = dateCreate;
	}
	public Date getDateUpdate() {
		return dateUpdate;
	}
	public void setDateUpdate(Date dateUpdate) {
		this.dateUpdate = dateUpdate;
	}
	@Override
	public int hashCode() {
		// Objects.hash(a, b) == 31*(31 + h(a)) + h(b), identical to the previous
		// hand-rolled prime-31 computation over (dateCreate, id).
		// NOTE(review): dateCreate is mutable; changing it after insertion into a
		// hash-based collection breaks lookups — kept for backward compatibility.
		return Objects.hash(dateCreate, id);
	}
	@Override
	public boolean equals(Object obj) {
		if (this == obj)
			return true;
		if (obj == null || getClass() != obj.getClass())
			return false;
		StoreAttribute other = (StoreAttribute) obj;
		// Same fields as hashCode: dateCreate and id only.
		return Objects.equals(dateCreate, other.dateCreate)
				&& Objects.equals(id, other.id);
	}
	@Override
	public String toString() {
		return "StoreAttribute [id=" + id + ", version=" + version + ", shortStringValue=" + shortStringValue + ", longStringValue=" + longStringValue + ", integerValue=" + integerValue + ", doubleValue=" + doubleValue + ", floatValue=" + floatValue
				+ ", blobValue=" + Arrays.toString(blobValue) + ", booleanValue=" + booleanValue + ", localizationCode=" + localizationCode + ", marketAreaId=" + marketAreaId + ", startDate="
				+ startDate + ", endDate=" + endDate + ", dateCreate=" + dateCreate + ", dateUpdate=" + dateUpdate + "]";
	}
}
| |
package org.drools.compiler.kproject.models;
import com.thoughtworks.xstream.converters.MarshallingContext;
import com.thoughtworks.xstream.converters.UnmarshallingContext;
import com.thoughtworks.xstream.io.HierarchicalStreamReader;
import com.thoughtworks.xstream.io.HierarchicalStreamWriter;
import org.drools.core.BeliefSystemType;
import org.drools.core.util.AbstractXStreamConverter;
import org.kie.api.builder.model.FileLoggerModel;
import org.kie.api.builder.model.KieBaseModel;
import org.kie.api.builder.model.KieSessionModel;
import org.kie.api.builder.model.ListenerModel;
import org.kie.api.builder.model.WorkItemHandlerModel;
import org.kie.api.runtime.conf.BeliefSystemTypeOption;
import org.kie.api.runtime.conf.ClockTypeOption;
import java.util.ArrayList;
import java.util.List;
/**
 * Default implementation of {@link KieSessionModel}: the declarative
 * configuration of one KIE session (type, clock, belief system, scope,
 * listeners, work-item handlers, loggers) inside a {@link KieBaseModelImpl}.
 *
 * <p>The nested {@link KSessionConverter} (de)serializes this model to/from
 * the {@code <ksession>} element of kmodule.xml via XStream.
 */
public class KieSessionModelImpl
        implements
        KieSessionModel {
    private String name;
    private KieSessionType type = KieSessionType.STATEFUL;
    private ClockTypeOption clockType = ClockTypeOption.get( "realtime" );
    private BeliefSystemTypeOption beliefSystem = BeliefSystemTypeOption.get(BeliefSystemType.SIMPLE.toString());
    // Default CDI scope used when none is declared in the XML.
    private String scope = "javax.enterprise.context.ApplicationScoped";
    private KieBaseModelImpl kBase;
    private final List<ListenerModel> listeners = new ArrayList<ListenerModel>();
    private final List<WorkItemHandlerModel> wihs = new ArrayList<WorkItemHandlerModel>();
    private boolean isDefault = false;
    private String consoleLogger;
    private FileLoggerModel fileLogger;
    /** Used only by the XStream converter during unmarshalling. */
    private KieSessionModelImpl() { }
    public KieSessionModelImpl(KieBaseModelImpl kBase, String name) {
        this.kBase = kBase;
        this.name = name;
    }
    public KieBaseModelImpl getKieBaseModel() {
        return kBase;
    }
    public boolean isDefault() {
        return isDefault;
    }
    public void setKBase(KieBaseModel kieBaseModel) {
        this.kBase = (KieBaseModelImpl) kieBaseModel;
    }
    public KieSessionModel setDefault(boolean isDefault) {
        this.isDefault = isDefault;
        return this;
    }
    /* (non-Javadoc)
     * @see org.kie.kproject.KieSessionModel#getName()
     */
    public String getName() {
        return name;
    }
    /** Renames this session, keeping the owning KieBase's registry in sync. */
    public KieSessionModel setName(String name) {
        kBase.changeKSessionName(this, this.name, name);
        this.name = name;
        return this;
    }
    /* (non-Javadoc)
     * @see org.kie.kproject.KieSessionModel#getType()
     */
    public KieSessionType getType() {
        return type;
    }
    /* (non-Javadoc)
     * @see org.kie.kproject.KieSessionModel#setType(java.lang.String)
     */
    public KieSessionModel setType(KieSessionType type) {
        this.type = type;
        return this;
    }
    /* (non-Javadoc)
     * @see org.kie.kproject.KieSessionModel#getClockType()
     */
    public ClockTypeOption getClockType() {
        return clockType;
    }
    /* (non-Javadoc)
     * @see org.kie.kproject.KieSessionModel#setClockType(org.kie.api.runtime.conf.ClockTypeOption)
     */
    public KieSessionModel setClockType(ClockTypeOption clockType) {
        this.clockType = clockType;
        return this;
    }
    public BeliefSystemTypeOption getBeliefSystem() {
        return beliefSystem;
    }
    public KieSessionModel setBeliefSystem(BeliefSystemTypeOption beliefSystem) {
        this.beliefSystem = beliefSystem;
        return this;
    }
    @Override
    public KieSessionModel setScope(String scope) {
        this.scope = scope;
        return this;
    }
    @Override
    public String getScope() {
        return this.scope;
    }
    /** Creates, registers and returns a new listener model for this session. */
    public ListenerModel newListenerModel(String type, ListenerModel.Kind kind) {
        ListenerModelImpl listenerModel = new ListenerModelImpl(this, type, kind);
        listeners.add(listenerModel);
        return listenerModel;
    }
    public List<ListenerModel> getListenerModels() {
        return listeners;
    }
    /** Returns only the registered listeners of the given kind. */
    private List<ListenerModel> getListenerModels(ListenerModel.Kind kind) {
        List<ListenerModel> matching = new ArrayList<ListenerModel>();
        for (ListenerModel listener : getListenerModels()) {
            if (listener.getKind() == kind) {
                matching.add(listener);
            }
        }
        return matching;
    }
    private void addListenerModel(ListenerModel listener) {
        listeners.add(listener);
    }
    /** Creates, registers and returns a new work-item handler model. */
    public WorkItemHandlerModel newWorkItemHandlerModel(String name, String type) {
        WorkItemHandlerModelImpl wihModel = new WorkItemHandlerModelImpl(this, name, type);
        wihs.add(wihModel);
        return wihModel;
    }
    public List<WorkItemHandlerModel> getWorkItemHandlerModels() {
        return wihs;
    }
    // Renamed from addWorkItemHandelerModel (typo); private with a single
    // internal call site, so the rename is not externally visible.
    private void addWorkItemHandlerModel(WorkItemHandlerModel wih) {
        wihs.add(wih);
    }
    public String getConsoleLogger() {
        return consoleLogger;
    }
    public KieSessionModel setConsoleLogger(String consoleLogger) {
        this.consoleLogger = consoleLogger;
        return this;
    }
    public FileLoggerModel getFileLogger() {
        return fileLogger;
    }
    public KieSessionModel setFileLogger(String fileName) {
        this.fileLogger = new FileLoggerModelImpl(fileName);
        return this;
    }
    public KieSessionModel setFileLogger(String fileName, int interval, boolean threaded) {
        this.fileLogger = new FileLoggerModelImpl(fileName, interval, threaded);
        return this;
    }
    @Override
    public String toString() {
        return "KieSessionModel [name=" + name + ", clockType=" + clockType + "]";
    }
    /**
     * XStream converter mapping {@link KieSessionModelImpl} to the
     * {@code <ksession>} XML element and back.
     */
    public static class KSessionConverter extends AbstractXStreamConverter {
        public KSessionConverter() {
            super(KieSessionModelImpl.class);
        }
        public void marshal(Object value, HierarchicalStreamWriter writer, MarshallingContext context) {
            KieSessionModelImpl kSession = (KieSessionModelImpl) value;
            writer.addAttribute("name", kSession.getName());
            writer.addAttribute("type", kSession.getType().toString().toLowerCase() );
            writer.addAttribute( "default", Boolean.toString(kSession.isDefault()) );
            if (kSession.getClockType() != null) {
                writer.addAttribute("clockType", kSession.getClockType().getClockType());
            }
            if ( kSession.getBeliefSystem() != null ) {
                writer.addAttribute( "beliefSystem", kSession.getBeliefSystem().getBeliefSystemType().toLowerCase() );
            }
            if (kSession.getScope() != null) {
                writer.addAttribute("scope", kSession.getScope() );
            }
            if (kSession.getConsoleLogger() != null) {
                writer.startNode("consoleLogger");
                // An empty logger name is written as a bare <consoleLogger/> element.
                if (kSession.getConsoleLogger().length() > 0) {
                    writer.addAttribute("name", kSession.getConsoleLogger());
                }
                writer.endNode();
            }
            if (kSession.getFileLogger() != null) {
                writer.startNode("fileLogger");
                writer.addAttribute("file", kSession.getFileLogger().getFile());
                writer.addAttribute("threaded", "" + kSession.getFileLogger().isThreaded());
                writer.addAttribute("interval", "" + kSession.getFileLogger().getInterval());
                writer.endNode();
            }
            writeObjectList(writer, context, "workItemHandlers", "workItemHandler", kSession.getWorkItemHandlerModels());
            if (!kSession.getListenerModels().isEmpty()) {
                // Listeners are grouped by kind to keep a stable element order.
                writer.startNode("listeners");
                for (ListenerModel listener : kSession.getListenerModels(ListenerModel.Kind.RULE_RUNTIME_EVENT_LISTENER)) {
                    writeObject(writer, context, listener.getKind().toString(), listener);
                }
                for (ListenerModel listener : kSession.getListenerModels(ListenerModel.Kind.AGENDA_EVENT_LISTENER)) {
                    writeObject(writer, context, listener.getKind().toString(), listener);
                }
                for (ListenerModel listener : kSession.getListenerModels(ListenerModel.Kind.PROCESS_EVENT_LISTENER)) {
                    writeObject(writer, context, listener.getKind().toString(), listener);
                }
                writer.endNode();
            }
        }
        public Object unmarshal(HierarchicalStreamReader reader, final UnmarshallingContext context) {
            final KieSessionModelImpl kSession = new KieSessionModelImpl();
            kSession.name = reader.getAttribute("name");
            kSession.setDefault( "true".equals(reader.getAttribute( "default" )) );
            String kSessionType = reader.getAttribute("type");
            kSession.setType(kSessionType != null ? KieSessionType.valueOf( kSessionType.toUpperCase() ) : KieSessionType.STATEFUL);
            String clockType = reader.getAttribute("clockType");
            if (clockType != null) {
                kSession.setClockType(ClockTypeOption.get(clockType));
            }
            String beliefSystem = reader.getAttribute( "beliefSystem" );
            if ( beliefSystem != null ) {
                kSession.setBeliefSystem( BeliefSystemTypeOption.get( beliefSystem ) );
            }
            String scope = reader.getAttribute("scope");
            if (scope != null) {
                kSession.setScope( scope );
            }
            readNodes( reader, new AbstractXStreamConverter.NodeReader() {
                public void onNode(HierarchicalStreamReader reader,
                                   String name,
                                   String value) {
                    if ("listeners".equals( name )) {
                        while (reader.hasMoreChildren()) {
                            reader.moveDown();
                            // The element name encodes the listener kind.
                            String nodeName = reader.getNodeName();
                            ListenerModelImpl listener = readObject(reader, context, ListenerModelImpl.class);
                            listener.setKSession( kSession );
                            listener.setKind(ListenerModel.Kind.fromString(nodeName));
                            kSession.addListenerModel(listener);
                            reader.moveUp();
                        }
                    } else if ( "workItemHandlers".equals( name ) ) {
                        List<WorkItemHandlerModelImpl> wihs = readObjectList(reader, context, WorkItemHandlerModelImpl.class);
                        for (WorkItemHandlerModelImpl wih : wihs) {
                            wih.setKSession( kSession );
                            kSession.addWorkItemHandlerModel(wih);
                        }
                    } else if ( "consoleLogger".equals( name ) ) {
                        String consoleLogger = reader.getAttribute("name");
                        kSession.setConsoleLogger(consoleLogger == null ? "" : consoleLogger);
                    } else if ( "fileLogger".equals( name ) ) {
                        FileLoggerModelImpl fileLoggerModel = new FileLoggerModelImpl( reader.getAttribute("file") );
                        try {
                            fileLoggerModel.setInterval( Integer.parseInt(reader.getAttribute("interval")) );
                        } catch (Exception e) {
                            // ignored: "interval" attribute absent or malformed — keep the model's default
                        }
                        try {
                            fileLoggerModel.setThreaded( Boolean.parseBoolean(reader.getAttribute("threaded")) );
                        } catch (Exception e) {
                            // ignored: "threaded" attribute absent or malformed — keep the model's default
                        }
                        kSession.fileLogger = fileLoggerModel;
                    }
                }
            } );
            return kSession;
        }
    }
}
| |
package edu.ucdenver.ccp.datasource.identifiers;
/*
* #%L
* Colorado Computational Pharmacology's common module
* %%
* Copyright (C) 2012 - 2014 Regents of the University of Colorado
* %%
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* 3. Neither the name of the Regents of the University of Colorado nor the names of its contributors
* may be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
* #L%
*/
import java.util.HashSet;
import java.util.Set;
import org.apache.log4j.Logger;
import edu.ucdenver.ccp.common.string.StringConstants;
import edu.ucdenver.ccp.common.string.StringUtil;
import edu.ucdenver.ccp.datasource.identifiers.bind.BindInteractionID;
import edu.ucdenver.ccp.datasource.identifiers.dip.DipInteractionID;
import edu.ucdenver.ccp.datasource.identifiers.dip.DipInteractorID;
import edu.ucdenver.ccp.datasource.identifiers.drugbank.DrugBankID;
import edu.ucdenver.ccp.datasource.identifiers.drugbank.DrugCodeDirectoryID;
import edu.ucdenver.ccp.datasource.identifiers.drugbank.DrugsProductDatabaseID;
import edu.ucdenver.ccp.datasource.identifiers.ebi.embl.EmblID;
import edu.ucdenver.ccp.datasource.identifiers.ebi.intact.IntActID;
import edu.ucdenver.ccp.datasource.identifiers.ebi.interpro.InterProID;
import edu.ucdenver.ccp.datasource.identifiers.ebi.interpro.PirID;
import edu.ucdenver.ccp.datasource.identifiers.ebi.interpro.TigrFamsID;
import edu.ucdenver.ccp.datasource.identifiers.ebi.ipi.IpiID;
import edu.ucdenver.ccp.datasource.identifiers.ebi.uniprot.UniProtID;
import edu.ucdenver.ccp.datasource.identifiers.ebi.uniprot.UniProtIsoformID;
import edu.ucdenver.ccp.datasource.identifiers.ensembl.EnsemblGeneID;
import edu.ucdenver.ccp.datasource.identifiers.flybase.FlyBaseID;
import edu.ucdenver.ccp.datasource.identifiers.hgnc.HgncGeneSymbolID;
import edu.ucdenver.ccp.datasource.identifiers.hgnc.HgncID;
import edu.ucdenver.ccp.datasource.identifiers.hprd.HprdID;
import edu.ucdenver.ccp.datasource.identifiers.kegg.KeggCompoundID;
import edu.ucdenver.ccp.datasource.identifiers.kegg.KeggDrugID;
import edu.ucdenver.ccp.datasource.identifiers.kegg.KeggGeneID;
import edu.ucdenver.ccp.datasource.identifiers.kegg.KeggPathwayID;
import edu.ucdenver.ccp.datasource.identifiers.mgi.MgiGeneID;
import edu.ucdenver.ccp.datasource.identifiers.mint.MintID;
import edu.ucdenver.ccp.datasource.identifiers.ncbi.GenBankID;
import edu.ucdenver.ccp.datasource.identifiers.ncbi.gene.EntrezGeneID;
import edu.ucdenver.ccp.datasource.identifiers.ncbi.gene.GiNumberID;
import edu.ucdenver.ccp.datasource.identifiers.ncbi.homologene.HomologeneGroupID;
import edu.ucdenver.ccp.datasource.identifiers.ncbi.omim.OmimID;
import edu.ucdenver.ccp.datasource.identifiers.ncbi.refseq.RefSeqID;
import edu.ucdenver.ccp.datasource.identifiers.ncbi.snp.SnpRsId;
import edu.ucdenver.ccp.datasource.identifiers.ncbi.taxonomy.NcbiTaxonomyID;
import edu.ucdenver.ccp.datasource.identifiers.obo.CellTypeOntologyID;
import edu.ucdenver.ccp.datasource.identifiers.obo.ChebiOntologyID;
import edu.ucdenver.ccp.datasource.identifiers.obo.GeneOntologyID;
import edu.ucdenver.ccp.datasource.identifiers.obo.MammalianPhenotypeID;
import edu.ucdenver.ccp.datasource.identifiers.obo.ProteinOntologyId;
import edu.ucdenver.ccp.datasource.identifiers.obo.SequenceOntologyId;
import edu.ucdenver.ccp.datasource.identifiers.other.AnimalQtlDbID;
import edu.ucdenver.ccp.datasource.identifiers.other.AphidBaseID;
import edu.ucdenver.ccp.datasource.identifiers.other.ApiDbCryptoDbID;
import edu.ucdenver.ccp.datasource.identifiers.other.BeeBaseID;
import edu.ucdenver.ccp.datasource.identifiers.other.BeetleBaseID;
import edu.ucdenver.ccp.datasource.identifiers.other.BindingDbId;
import edu.ucdenver.ccp.datasource.identifiers.other.BioGridID;
import edu.ucdenver.ccp.datasource.identifiers.other.CgncID;
import edu.ucdenver.ccp.datasource.identifiers.other.ChemSpiderId;
import edu.ucdenver.ccp.datasource.identifiers.other.ClinicalTrialsGovId;
import edu.ucdenver.ccp.datasource.identifiers.other.DailyMedId;
import edu.ucdenver.ccp.datasource.identifiers.other.DbjID;
import edu.ucdenver.ccp.datasource.identifiers.other.DictyBaseID;
import edu.ucdenver.ccp.datasource.identifiers.other.EcoCycID;
import edu.ucdenver.ccp.datasource.identifiers.other.EcoGeneID;
import edu.ucdenver.ccp.datasource.identifiers.other.EmbID;
import edu.ucdenver.ccp.datasource.identifiers.other.GdbId;
import edu.ucdenver.ccp.datasource.identifiers.other.GeoId;
import edu.ucdenver.ccp.datasource.identifiers.other.ImgtID;
import edu.ucdenver.ccp.datasource.identifiers.other.IsrctnId;
import edu.ucdenver.ccp.datasource.identifiers.other.IupharLigandId;
import edu.ucdenver.ccp.datasource.identifiers.other.MaizeGdbID;
import edu.ucdenver.ccp.datasource.identifiers.other.MiRBaseID;
import edu.ucdenver.ccp.datasource.identifiers.other.NasoniaBaseID;
import edu.ucdenver.ccp.datasource.identifiers.other.NationalDrugCodeDirectoryId;
import edu.ucdenver.ccp.datasource.identifiers.other.PathemaID;
import edu.ucdenver.ccp.datasource.identifiers.other.PbrID;
import edu.ucdenver.ccp.datasource.identifiers.other.PseudoCapID;
import edu.ucdenver.ccp.datasource.identifiers.other.PubChemBioAssayId;
import edu.ucdenver.ccp.datasource.identifiers.other.PubChemCompoundId;
import edu.ucdenver.ccp.datasource.identifiers.other.PubChemSubstanceId;
import edu.ucdenver.ccp.datasource.identifiers.other.RatMapID;
import edu.ucdenver.ccp.datasource.identifiers.other.TairID;
import edu.ucdenver.ccp.datasource.identifiers.other.TherapeuticTargetsDatabaseId;
import edu.ucdenver.ccp.datasource.identifiers.other.UniParcID;
import edu.ucdenver.ccp.datasource.identifiers.other.VbrcID;
import edu.ucdenver.ccp.datasource.identifiers.other.VectorBaseID;
import edu.ucdenver.ccp.datasource.identifiers.other.VegaID;
import edu.ucdenver.ccp.datasource.identifiers.other.WikipediaId;
import edu.ucdenver.ccp.datasource.identifiers.other.XenBaseID;
import edu.ucdenver.ccp.datasource.identifiers.other.ZfinID;
import edu.ucdenver.ccp.datasource.identifiers.pdb.PdbID;
import edu.ucdenver.ccp.datasource.identifiers.pdb.PdbLigandId;
import edu.ucdenver.ccp.datasource.identifiers.pharmgkb.PharmGkbID;
import edu.ucdenver.ccp.datasource.identifiers.psi.PsiModId;
import edu.ucdenver.ccp.datasource.identifiers.reactome.ReactomeReactionID;
import edu.ucdenver.ccp.datasource.identifiers.rgd.RgdID;
import edu.ucdenver.ccp.datasource.identifiers.sgd.SgdID;
import edu.ucdenver.ccp.datasource.identifiers.transfac.TransfacGeneID;
import edu.ucdenver.ccp.datasource.identifiers.wormbase.WormBaseID;
import edu.ucdenver.ccp.identifier.publication.PubMedID;
/**
* provides various methods to map from an ID in database or ontology files to
* instances of identifier classes under
* edu.ucdenver.ccp.datasource.identifiers.
*
* These are basically factory methods. Given some information about where the
* ID came from and an ID string, it creates an instance of an identifier class
* related to the source. This is done for DataSourceIdentifiers, PMID
* identifiers and others.
*
* Three functions named resolveId(): - a value of the DataSource enum and an ID
 * string. - a name of a data source and an ID string. - an ID string that is
* parsed to discover the data source it came from.
**/
public class DataSourceIdResolver {
private static final String IREFWEB_ENTREZGENE_ID_PREFIX = "entrezgene/locuslink:";
private static final Logger logger = Logger.getLogger(DataSourceIdResolver.class);
public static DataSourceIdentifier<?> resolveId(DataSource dataSource, String databaseObjectID) {
switch (dataSource) {
case CLINICAL_TRIALS_GOV:
return new ClinicalTrialsGovId(databaseObjectID);
case DIP:
if (databaseObjectID.matches("DIP-\\d+N"))
return new DipInteractorID(databaseObjectID);
if (databaseObjectID.matches("DIP-\\d+E"))
return new DipInteractionID(databaseObjectID);
throw new IllegalArgumentException(String.format("Invalid DIP Interactor ID detected %s", databaseObjectID));
case EG:
return new EntrezGeneID(databaseObjectID);
case EMBL:
return new EmblID(databaseObjectID);
case ISRCTN:
return new IsrctnId(databaseObjectID);
case GDB:
return new GdbId(databaseObjectID);
case GENBANK:
return new GenBankID(databaseObjectID);
case GEO:
return new GeoId(databaseObjectID);
case MGI:
return new MgiGeneID(databaseObjectID);
case PHARMGKB:
return new PharmGkbID(databaseObjectID);
case PR:
return new ProteinOntologyId(databaseObjectID);
case PUBCHEM_SUBSTANCE:
return new PubChemSubstanceId(databaseObjectID);
case PUBCHEM_COMPOUND:
return new PubChemCompoundId(databaseObjectID);
case PUBCHEM_BIOASSAY:
return new PubChemBioAssayId(databaseObjectID);
case HPRD:
return new HprdID(databaseObjectID);
case HGNC:
return new HgncGeneSymbolID(databaseObjectID);
case OMIM:
return new OmimID(databaseObjectID);
case PDB:
return new PdbID(databaseObjectID);
case PIR:
return new PirID(databaseObjectID);
case REFSEQ:
return new RefSeqID(databaseObjectID);
case RGD:
return new RgdID(databaseObjectID);
case TRANSFAC:
return new TransfacGeneID(databaseObjectID);
case UNIPROT:
if (databaseObjectID.contains(StringConstants.HYPHEN_MINUS))
return new UniProtIsoformID(databaseObjectID);
return new UniProtID(databaseObjectID);
default:
throw new IllegalArgumentException(String.format(
"Resolving the ID's for this DataSource are not yet implemented: %s.", dataSource.name()));
}
}
// TODO: remove this method and replace its use with resolveId(DataSource,
// String)
public static DataSourceIdentifier<?> resolveId(String databaseName, String databaseObjectID) {
if (databaseName.equalsIgnoreCase("MGI"))
return new MgiGeneID(databaseObjectID);
else if (databaseName.equalsIgnoreCase("RGD"))
return new RgdID(databaseObjectID);
else if (databaseName.equalsIgnoreCase("UniProtKB"))
return new UniProtID(databaseObjectID);
else if (databaseName.equalsIgnoreCase("chebi"))
return new ChebiOntologyID("CHEBI:" + databaseObjectID);
else if (databaseName.equalsIgnoreCase("DIP"))
return new DipInteractorID(databaseObjectID);
else if (databaseName.equalsIgnoreCase("drugbank"))
return new DrugBankID(databaseObjectID);
else if (databaseName.equalsIgnoreCase("keggdrug"))
return new KeggDrugID(databaseObjectID);
else if (databaseName.equalsIgnoreCase("keggcompound"))
return new KeggCompoundID(databaseObjectID);
else if (databaseName.equalsIgnoreCase("pubchemcompound"))
return new PubChemCompoundId(databaseObjectID);
else if (databaseName.equalsIgnoreCase("pubchemsubstance"))
return new PubChemSubstanceId(databaseObjectID);
else if (databaseName.equalsIgnoreCase("EG"))
return new EntrezGeneID(databaseObjectID);
else if (databaseName.equalsIgnoreCase("Ensembl"))
return new EnsemblGeneID(databaseObjectID);
else if (databaseName.equalsIgnoreCase("EMBL"))
return new EmblID(databaseObjectID);
else if (databaseName.equalsIgnoreCase("RefSeq"))
return new RefSeqID(databaseObjectID);
else if (databaseName.equalsIgnoreCase("VEGA"))
return new VegaID(databaseObjectID);
else if (databaseName.equalsIgnoreCase("NCBI-GI"))
return new GiNumberID(databaseObjectID);
else if (databaseName.equalsIgnoreCase("NCBI-GeneID"))
return new EntrezGeneID(databaseObjectID);
else if (databaseName.equalsIgnoreCase("OMIM"))
return new OmimID(databaseObjectID);
else if (databaseName.equalsIgnoreCase("HGNC"))
return new HgncID(databaseObjectID);
else if (databaseName.equalsIgnoreCase("HPRD"))
return new HprdID(databaseObjectID);
else if (databaseName.equalsIgnoreCase("UniProt"))
return new UniProtID(databaseObjectID);
else if (databaseName.equalsIgnoreCase("PharmGKB"))
return new PharmGkbID(databaseObjectID);
else if (databaseName.equalsIgnoreCase("TTD"))
return new TherapeuticTargetsDatabaseId(databaseObjectID);
else if (databaseName.equalsIgnoreCase("bindingDb"))
return new BindingDbId(databaseObjectID);
else if (databaseName.equalsIgnoreCase("chemSpider"))
return new ChemSpiderId(databaseObjectID);
else if (databaseName.equalsIgnoreCase("dpd"))
return new DrugsProductDatabaseID(databaseObjectID);
else if (databaseName.equalsIgnoreCase("DailyMed"))
return new DailyMedId(databaseObjectID);
else if (databaseName.equalsIgnoreCase("HET"))
return new PdbLigandId(databaseObjectID);
else if (databaseName.equalsIgnoreCase("iupharLigand"))
return new IupharLigandId(databaseObjectID);
else if (databaseName.equalsIgnoreCase("ndc"))
return new NationalDrugCodeDirectoryId(databaseObjectID);
else if (databaseName.equalsIgnoreCase("pdb")) {
if (databaseObjectID.length() == 3) {
return new PdbLigandId(databaseObjectID);
}
return new PdbID(databaseObjectID);
} else if (databaseName.equalsIgnoreCase("Drugs Product Database (DPD)")
|| databaseName.equalsIgnoreCase("DPD"))
return new DrugsProductDatabaseID(databaseObjectID);
else if (databaseName.equalsIgnoreCase("National Drug Code Directory"))
return new DrugCodeDirectoryID(databaseObjectID);
else if (databaseName.equalsIgnoreCase("url")) {
if (databaseObjectID.startsWith("http://en.wikipedia.org/wiki/")) {
return new WikipediaId(StringUtil.removePrefix(databaseObjectID, "http://en.wikipedia.org/wiki/"));
}
} else if (databaseName.equalsIgnoreCase("GenBank") || databaseName.equalsIgnoreCase("GenBank Gene Database")
|| databaseName.equalsIgnoreCase("GenBank Protein Database"))
return new GenBankID(databaseObjectID);
logger.warn("Unable to resolve data source identifier: datasource=" + databaseName + " id=" + databaseObjectID
+ ". Using UnknownDataSourceIdentifier.");
return new UnknownDataSourceIdentifier(databaseObjectID, databaseName);
}
/**
* Resolve provided id to an instance of {@link DataSourceIdentifier}.
*
* @param geneIDStr
* @return if can be resolved, id instance; otherwise, null
*/
public static DataSourceIdentifier<?> resolveId(String geneIDStr) {
try {
if (geneIDStr.startsWith("MGI:"))
return new MgiGeneID(geneIDStr);
else if (geneIDStr.startsWith("ncbi-geneid:"))
return new EntrezGeneID(StringUtil.removePrefix(geneIDStr, "ncbi-geneid:"));
else if (geneIDStr.startsWith(IREFWEB_ENTREZGENE_ID_PREFIX))
return new EntrezGeneID(StringUtil.removePrefix(geneIDStr, IREFWEB_ENTREZGENE_ID_PREFIX));
else if (geneIDStr.startsWith("Ensembl:"))
return new EnsemblGeneID(StringUtil.removePrefix(geneIDStr, "Ensembl:"));
else if (geneIDStr.startsWith("refseq:"))
return new RefSeqID(StringUtil.removePrefix(geneIDStr, "refseq:"));
else if (StringUtil.startsWithRegex(geneIDStr.toLowerCase(), "uniprot.*?:")) {
geneIDStr = StringUtil.removePrefixRegex(geneIDStr.toLowerCase(), "uniprot.*?:");
if (geneIDStr.contains(StringConstants.HYPHEN_MINUS))
return new UniProtIsoformID(geneIDStr.toUpperCase());
return new UniProtID(geneIDStr.toUpperCase());
} else if (geneIDStr.startsWith("Swiss-Prot:"))
return new UniProtID(StringUtil.removePrefix(geneIDStr, "Swiss-Prot:"));
else if (geneIDStr.startsWith("TREMBL:"))
return new UniProtID(StringUtil.removePrefix(geneIDStr, "TREMBL:"));
else if (geneIDStr.startsWith("TAIR:"))
return new TairID(StringUtil.removePrefix(geneIDStr, "TAIR:"));
else if (geneIDStr.startsWith("MaizeGDB:"))
return new MaizeGdbID(StringUtil.removePrefix(geneIDStr, "MaizeGDB:"));
else if (geneIDStr.startsWith("WormBase:"))
return new WormBaseID(StringUtil.removePrefix(geneIDStr, "WormBase:"));
else if (geneIDStr.startsWith("BEEBASE:"))
return new BeeBaseID(StringUtil.removePrefix(geneIDStr, "BEEBASE:"));
else if (geneIDStr.startsWith("NASONIABASE:"))
return new NasoniaBaseID(StringUtil.removePrefix(geneIDStr, "NASONIABASE:"));
else if (geneIDStr.startsWith("VectorBase:"))
return new VectorBaseID(StringUtil.removePrefix(geneIDStr, "VectorBase:"));
else if (geneIDStr.startsWith("APHIDBASE:"))
return new AphidBaseID(StringUtil.removePrefix(geneIDStr, "APHIDBASE:"));
else if (geneIDStr.startsWith("BEETLEBASE:"))
return new BeetleBaseID(StringUtil.removePrefix(geneIDStr, "BEETLEBASE:"));
else if (geneIDStr.toUpperCase().startsWith("FLYBASE:"))
return new FlyBaseID(StringUtil.removePrefix(geneIDStr.toUpperCase(), "FLYBASE:"));
else if (geneIDStr.startsWith("ZFIN:"))
return new ZfinID(StringUtil.removePrefix(geneIDStr, "ZFIN:"));
else if (geneIDStr.startsWith("AnimalQTLdb:"))
return new AnimalQtlDbID(StringUtil.removePrefix(geneIDStr, "AnimalQTLdb:"));
else if (geneIDStr.startsWith("RGD:"))
return new RgdID(StringUtil.removePrefix(geneIDStr, "RGD:"));
else if (geneIDStr.startsWith("PBR:"))
return new PbrID(StringUtil.removePrefix(geneIDStr, "PBR:"));
else if (geneIDStr.startsWith("VBRC:"))
return new VbrcID(StringUtil.removePrefix(geneIDStr, "VBRC:"));
else if (geneIDStr.startsWith("Pathema:"))
return new PathemaID(StringUtil.removePrefix(geneIDStr, "Pathema:"));
else if (geneIDStr.startsWith("PseudoCap:"))
return new PseudoCapID(StringUtil.removePrefix(geneIDStr, "PseudoCap:"));
else if (geneIDStr.startsWith("ApiDB_CryptoDB:"))
return new ApiDbCryptoDbID(StringUtil.removePrefix(geneIDStr, "ApiDB_CryptoDB:"));
else if (geneIDStr.startsWith("dictyBase:"))
return new DictyBaseID(StringUtil.removePrefix(geneIDStr, "dictyBase:"));
else if (geneIDStr.startsWith("UniProtKB/Swiss-Prot:"))
return new UniProtID(StringUtil.removePrefix(geneIDStr, "UniProtKB/Swiss-Prot:"));
else if (geneIDStr.startsWith("InterPro:"))
return new InterProID(StringUtil.removePrefix(geneIDStr, "InterPro:"));
else if (geneIDStr.startsWith("EcoGene:"))
return new EcoGeneID(StringUtil.removePrefix(geneIDStr, "EcoGene:"));
else if (geneIDStr.toUpperCase().startsWith("ECOCYC:"))
return new EcoCycID(StringUtil.removePrefix(geneIDStr.toUpperCase(), "ECOCYC:"));
else if (geneIDStr.startsWith("SGD:"))
return new SgdID(StringUtil.removePrefix(geneIDStr, "SGD:"));
else if (geneIDStr.startsWith("RATMAP:"))
return new RatMapID(StringUtil.removePrefix(geneIDStr, "RATMAP:"));
else if (geneIDStr.startsWith("Xenbase:"))
return new XenBaseID(StringUtil.removePrefix(geneIDStr, "Xenbase:"));
else if (geneIDStr.startsWith("CGNC:"))
return new CgncID(StringUtil.removePrefix(geneIDStr, "CGNC:"));
else if (geneIDStr.startsWith("HGNC:"))
return new HgncID(geneIDStr);
else if (geneIDStr.startsWith("MIM:"))
return new OmimID(StringUtil.removePrefix(geneIDStr, "MIM:"));
else if (geneIDStr.startsWith("HPRD:"))
return new HprdID(StringUtil.removePrefix(geneIDStr, "HPRD:"));
else if (geneIDStr.startsWith("IMGT/GENE-DB:"))
return new ImgtID(StringUtil.removePrefix(geneIDStr, "IMGT/GENE-DB:"));
else if (geneIDStr.startsWith("PDB:"))
return new PdbID(StringUtil.removePrefix(geneIDStr, "PDB:"));
else if (geneIDStr.toLowerCase().startsWith("gb:"))
return new GenBankID(StringUtil.removePrefix(geneIDStr.toLowerCase(), "gb:").toUpperCase());
else if (geneIDStr.startsWith("emb:"))
return new EmbID(StringUtil.removePrefix(geneIDStr, "emb:"));
else if (geneIDStr.startsWith("dbj:"))
return new DbjID(StringUtil.removePrefix(geneIDStr, "dbj:"));
else if (geneIDStr.startsWith("intact:"))
return new IntActID(StringUtil.removePrefix(geneIDStr, "intact:"));
else if (geneIDStr.startsWith("RefSeq:"))
return new RefSeqID(StringUtil.removePrefix(geneIDStr, "RefSeq:"));
else if (geneIDStr.startsWith("uniparc:"))
return new UniParcID(StringUtil.removePrefix(geneIDStr, "uniparc:"));
else if (geneIDStr.startsWith("genbank_protein_gi:"))
return new GiNumberID(StringUtil.removePrefix(geneIDStr, "genbank_protein_gi:"));
else if (geneIDStr.toLowerCase().startsWith("pir:"))
return new PirID(StringUtil.removePrefix(geneIDStr.toLowerCase(), "pir:").toUpperCase());
else if (geneIDStr.startsWith("pubmed:"))
return new PubMedID(StringUtil.removePrefix(geneIDStr, "pubmed:"));
else if (geneIDStr.startsWith("dip:") && geneIDStr.endsWith("N"))
return new DipInteractorID(StringUtil.removePrefix(geneIDStr, "dip:"));
else if (geneIDStr.startsWith("dip:") && geneIDStr.endsWith("E"))
return new DipInteractionID(StringUtil.removePrefix(geneIDStr, "dip:"));
else if (geneIDStr.startsWith("TIGR:"))
return new TigrFamsID(StringUtil.removePrefix(geneIDStr, "TIGR:"));
else if (geneIDStr.startsWith("ipi:"))
return new IpiID(StringUtil.removePrefix(geneIDStr, "ipi:"));
else if (geneIDStr.startsWith("mint:"))
return new MintID(StringUtil.removePrefix(geneIDStr, "mint:"));
else if (geneIDStr.startsWith("Reactome:"))
return new ReactomeReactionID(StringUtil.removePrefix(geneIDStr, "Reactome:"));
else if (geneIDStr.startsWith("miRBase:"))
return new MiRBaseID(StringUtil.removePrefix(geneIDStr, "miRBase:"));
else if (geneIDStr.startsWith("PR:"))
return new ProteinOntologyId(geneIDStr);
else if (geneIDStr.startsWith("SO:"))
return new SequenceOntologyId(geneIDStr);
else if (geneIDStr.startsWith("GO:"))
return new GeneOntologyID(geneIDStr);
else if (geneIDStr.startsWith("CHEBI:"))
return new ChebiOntologyID(geneIDStr);
else if (geneIDStr.startsWith("MP:"))
return new MammalianPhenotypeID(geneIDStr);
else if (geneIDStr.startsWith("MOD:"))
return new PsiModId(geneIDStr);
else if (geneIDStr.startsWith("KEGG_"))
return new KeggGeneID(geneIDStr);
else if (geneIDStr.startsWith("KEGG_PATHWAY"))
return new KeggPathwayID(geneIDStr);
else if (geneIDStr.startsWith("EG_"))
return new EntrezGeneID(StringUtil.removePrefix(geneIDStr, "EG_"));
else if (geneIDStr.startsWith("HOMOLOGENE_GROUP_"))
return new HomologeneGroupID(StringUtil.removePrefix(geneIDStr, "HOMOLOGENE_GROUP_"));
else if (geneIDStr.matches("IPR\\d+"))
return new InterProID(geneIDStr);
else if (geneIDStr.matches("rs\\d+"))
return new SnpRsId(geneIDStr);
else if (geneIDStr.startsWith("CL:"))
return new CellTypeOntologyID(geneIDStr);
else if (geneIDStr.startsWith("Vega:"))
return new VegaID(StringUtil.removePrefix(geneIDStr, "Vega:"));
else if (geneIDStr.startsWith("NCBITaxon:"))
return new NcbiTaxonomyID(StringUtil.removePrefix(geneIDStr, "NCBITaxon:"));
logger.warn(String.format("Unhandled gene ID format: %s. Creating UnknownDataSourceIdentifier.", geneIDStr));
return new UnknownDataSourceIdentifier(geneIDStr);
} catch (IllegalArgumentException e) {
logger.warn("Invalid ID detected... " + e.getMessage());
return new ProbableErrorDataSourceIdentifier(geneIDStr, null, e.getMessage());
}
}
/**
* Resolve interaction id to {@link DataSourceIdentifier}.
*
* @param interactionIDStr
* id to resolve
* @return identifier if argument is resolvable and supported; otherwise,
* return null.
*/
private static DataSourceIdentifier<?> resolveInteractionID(String interactionIDStr) {
if (interactionIDStr.startsWith("intact:")) {
return new IntActID(StringUtil.removePrefix(interactionIDStr, "intact:"));
} else if (interactionIDStr.startsWith("bind:")) {
return new BindInteractionID(StringUtil.removePrefix(interactionIDStr, "bind:"));
} else if (interactionIDStr.startsWith("grid:")) {
return new BioGridID(StringUtil.removePrefix(interactionIDStr, "grid:"));
} else if (interactionIDStr.startsWith("mint:")) {
return new MintID(StringUtil.removePrefix(interactionIDStr, "mint:"));
}
logger.warn(String.format("Unknown interaction ID format: %s. Cannot create DataElementIdentifier<?>.",
interactionIDStr));
return new UnknownDataSourceIdentifier(interactionIDStr);
}
/**
* Resolve interaction IDs to list of {@link DataSourceIdentifier}.
*
* @param interactionIDStrs
* ids to resolve
* @return identifier if all members of <code>interactionIDStrs</code> are
* resolvable and supported; otherwise, return null.
*/
public static Set<DataSourceIdentifier<?>> resolveInteractionIDs(Set<String> interactionIDStrs) {
Set<DataSourceIdentifier<?>> interactionIDs = new HashSet<DataSourceIdentifier<?>>();
for (String interactionIDStr : interactionIDStrs) {
DataSourceIdentifier<?> id = resolveInteractionID(interactionIDStr);
if (id != null) {
interactionIDs.add(id);
}
}
return interactionIDs;
}
/**
* Resolve Pubmed ID from value that starts with prefix 'pubmed:'.
*
* @param pmidStr
* @return id if value following prefix is a positive integer; otherwise,
* null
*/
public static DataSourceIdentifier<?> resolvePubMedID(String pmidStr) {
String prefix = "pubmed:";
if (pmidStr.startsWith(prefix)) {
String id = StringUtil.removePrefix(pmidStr, prefix);
if (StringUtil.isIntegerGreaterThanZero(id)) {
return new PubMedID(id);
}
}
logger.warn(String.format("Unknown PubMed ID format: %s. Cannot create PubMedID.", pmidStr));
return new ProbableErrorDataSourceIdentifier(pmidStr, null, "Invalid PubMedID, must be an integer.");
}
public static Set<DataSourceIdentifier<?>> resolvePubMedIDs(Set<String> pmidStrs) {
Set<DataSourceIdentifier<?>> pmids = new HashSet<DataSourceIdentifier<?>>();
for (String pmidStr : pmidStrs) {
DataSourceIdentifier<?> id = resolvePubMedID(pmidStr);
if (id == null) {
return null;
}
pmids.add(id);
}
return pmids;
}
public static Set<DataSourceIdentifier<?>> resolveIds(Set<String> databaseObjectIDStrs) {
Set<DataSourceIdentifier<?>> databaseObjectIDs = new HashSet<DataSourceIdentifier<?>>();
for (String databaseObjectIDStr : databaseObjectIDStrs) {
DataSourceIdentifier<?> id = resolveId(databaseObjectIDStr);
if (id != null) {
databaseObjectIDs.add(id);
}
}
return databaseObjectIDs;
}
}
| |
/*
* Copyright (c) 2008-2016, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.internal.jmx;
import com.hazelcast.spi.properties.GroupProperty;
import javax.management.Attribute;
import javax.management.AttributeList;
import javax.management.AttributeNotFoundException;
import javax.management.DynamicMBean;
import javax.management.IntrospectionException;
import javax.management.InvalidAttributeValueException;
import javax.management.MBeanAttributeInfo;
import javax.management.MBeanException;
import javax.management.MBeanInfo;
import javax.management.MBeanOperationInfo;
import javax.management.MBeanRegistration;
import javax.management.MBeanServer;
import javax.management.MalformedObjectNameException;
import javax.management.ObjectName;
import javax.management.ReflectionException;
import java.lang.management.ManagementFactory;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.Map;
/**
* Base class for beans registered to JMX by Hazelcast.
*
* @param <T> represents management bean to be registered.
*/
public abstract class HazelcastMBean<T> implements DynamicMBean, MBeanRegistration {
protected HashMap<String, BeanInfo> attributeMap = new HashMap<String, BeanInfo>();
protected HashMap<String, BeanInfo> operationMap = new HashMap<String, BeanInfo>();
protected final long updateIntervalSec;
final T managedObject;
final ManagementService service;
String description;
ObjectName objectName;
protected HazelcastMBean(T managedObject, ManagementService service) {
this.managedObject = managedObject;
this.service = service;
updateIntervalSec = service.instance.node.getProperties().getLong(GroupProperty.JMX_UPDATE_INTERVAL_SECONDS);
}
public void register(HazelcastMBean mbean) {
MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
try {
mbs.registerMBean(mbean, mbean.objectName);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
private void scan() throws Exception {
ManagedDescription descAnn = getClass().getAnnotation(ManagedDescription.class);
if (descAnn != null) {
description = descAnn.value();
}
for (Method method : getClass().getMethods()) {
if (method.isAnnotationPresent(ManagedAnnotation.class)) {
ManagedAnnotation ann = method.getAnnotation(ManagedAnnotation.class);
String name = ann.value();
if (name.isEmpty()) {
throw new IllegalArgumentException("Name cannot be empty!");
}
boolean operation = ann.operation();
HashMap<String, BeanInfo> map = operation ? operationMap : attributeMap;
if (map.containsKey(name)) {
throw new IllegalArgumentException("Duplicate name: " + name);
}
descAnn = method.getAnnotation(ManagedDescription.class);
String desc = null;
if (descAnn != null) {
desc = descAnn.value();
}
map.put(name, new BeanInfo(name, desc, method));
}
}
}
@Override
public Object getAttribute(String attribute)
throws AttributeNotFoundException, MBeanException, ReflectionException {
if (attribute == null || attribute.length() == 0) {
throw new NullPointerException("Invalid null attribute requested");
}
BeanInfo info = attributeMap.get(attribute);
try {
return info.method.invoke(this);
} catch (Exception e) {
throw new ReflectionException(e);
}
}
public void setObjectName(Map<String, String> properties) {
try {
objectName = new ObjectName(ManagementService.DOMAIN, new Hashtable<String, String>(properties));
} catch (MalformedObjectNameException e) {
throw new IllegalArgumentException("Failed to create an ObjectName", e);
}
}
@Override
public void setAttribute(Attribute attribute)
throws AttributeNotFoundException, InvalidAttributeValueException, MBeanException, ReflectionException {
throw new UnsupportedOperationException();
}
@Override
public AttributeList getAttributes(String[] attributes) {
AttributeList list = new AttributeList(attributes.length);
try {
for (String attribute : attributes) {
list.add(new Attribute(attribute, getAttribute(attribute)));
}
} catch (Exception e) {
throw new IllegalArgumentException(e);
}
return list;
}
@Override
public AttributeList setAttributes(AttributeList attributes) {
throw new UnsupportedOperationException();
}
@Override
public Object invoke(String actionName, Object[] params, String[] signature)
throws MBeanException, ReflectionException {
if (actionName == null || actionName.isEmpty()) {
throw new IllegalArgumentException("Empty actionName");
}
BeanInfo info = operationMap.get(actionName);
if (info == null) {
throw new UnsupportedOperationException("Operation: " + actionName + " not registered");
}
try {
return info.method.invoke(this, params);
} catch (Exception e) {
throw new ReflectionException(e);
}
}
@Override
public MBeanInfo getMBeanInfo() {
String className = managedObject.getClass().getName();
return new MBeanInfo(className, description, attributeInfos(), null, operationInfos(), null);
}
private MBeanAttributeInfo[] attributeInfos() {
MBeanAttributeInfo[] array = new MBeanAttributeInfo[attributeMap.size()];
int i = 0;
for (BeanInfo beanInfo : attributeMap.values()) {
array[i++] = beanInfo.getAttributeInfo();
}
return array;
}
private MBeanOperationInfo[] operationInfos() {
MBeanOperationInfo[] array = new MBeanOperationInfo[operationMap.size()];
int i = 0;
for (BeanInfo beanInfo : operationMap.values()) {
array[i++] = beanInfo.getOperationInfo();
}
return array;
}
private class BeanInfo {
final String name;
final String description;
transient Method method;
public BeanInfo(String name, String description, Method method) {
this.name = name;
this.description = description;
this.method = method;
}
public MBeanAttributeInfo getAttributeInfo() {
try {
return new MBeanAttributeInfo(name, description, method, null);
} catch (IntrospectionException e) {
throw new IllegalArgumentException(e);
}
}
public MBeanOperationInfo getOperationInfo() {
return new MBeanOperationInfo(description, method);
}
}
/**
 * JMX registration callback: scans the managed object for attributes and
 * operations before the MBean server completes registration.
 *
 * <p>{@code scan()} failures propagate directly: this method already
 * declares {@code throws Exception}, so wrapping them in
 * {@code IllegalArgumentException} would only obscure the original
 * exception type from the MBean server.
 *
 * @param server the MBean server performing the registration (unused)
 * @param name the name proposed by the caller (ignored; the preconfigured
 *        object name is returned instead)
 * @return the ObjectName under which this MBean is registered
 */
@Override
public ObjectName preRegister(MBeanServer server, ObjectName name) throws Exception {
    scan();
    return objectName;
}
/** JMX callback; no post-registration work is required. */
@Override
public void postRegister(Boolean registrationDone) {
}

/** JMX callback; no cleanup is required before deregistration. */
@Override
public void preDeregister() throws Exception {
}

/** JMX callback; no cleanup is required after deregistration. */
@Override
public void postDeregister() {
}
}
| |
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.route53.model;
import java.io.Serializable;
import java.util.Objects;

import com.amazonaws.AmazonWebServiceRequest;
/**
 * <p>
 * A complex type that contains information about the request to create a hosted
 * zone.
 * </p>
 */
public class CreateHostedZoneRequest extends AmazonWebServiceRequest implements
        Serializable, Cloneable {

    /**
     * <p>
     * The name of the domain. This must be a fully-specified domain, for
     * example, www.example.com. The trailing dot is optional; Amazon Route 53
     * assumes that the domain name is fully qualified. This means that Amazon
     * Route 53 treats www.example.com (without a trailing dot) and
     * www.example.com. (with a trailing dot) as identical.
     * </p>
     * <p>
     * This is the name you have registered with your DNS registrar. You should
     * ask your registrar to change the authoritative name servers for your
     * domain to the set of <code>NameServers</code> elements returned in
     * <code>DelegationSet</code>.
     * </p>
     */
    private String name;

    /**
     * <p>
     * The VPC that you want your hosted zone to be associated with. By
     * providing this parameter, your newly created hosted zone cannot be
     * resolved anywhere other than the given VPC.
     * </p>
     */
    private VPC vPC;

    /**
     * <p>
     * A unique string that identifies the request and that allows failed
     * <code>CreateHostedZone</code> requests to be retried without the risk of
     * executing the operation twice. You must use a unique
     * <code>CallerReference</code> string every time you create a hosted zone.
     * <code>CallerReference</code> can be any unique string; you might choose
     * to use a string that identifies your project, such as
     * <code>DNSMigration_01</code>.
     * </p>
     * <p>
     * Valid characters are any Unicode code points that are legal in an XML 1.0
     * document. The UTF-8 encoding of the value must be less than 128 bytes.
     * </p>
     */
    private String callerReference;

    /**
     * <p>
     * A complex type that contains an optional comment about your hosted zone.
     * </p>
     */
    private HostedZoneConfig hostedZoneConfig;

    /**
     * <p>
     * The delegation set id of the reusable delegation set whose NS records you
     * want to assign to the new hosted zone.
     * </p>
     */
    private String delegationSetId;

    /**
     * Default constructor for CreateHostedZoneRequest object. Callers should
     * use the setter or fluent setter (with...) methods to initialize the
     * object after creating it.
     */
    public CreateHostedZoneRequest() {
    }

    /**
     * Constructs a new CreateHostedZoneRequest object. Callers should use the
     * setter or fluent setter (with...) methods to initialize any additional
     * object members.
     *
     * @param name
     *        The name of the domain (fully specified; trailing dot optional).
     * @param callerReference
     *        A unique string that identifies the request and allows failed
     *        <code>CreateHostedZone</code> requests to be retried without the
     *        risk of executing the operation twice.
     */
    public CreateHostedZoneRequest(String name, String callerReference) {
        setName(name);
        setCallerReference(callerReference);
    }

    /**
     * Sets the name of the domain. This must be a fully-specified domain; the
     * trailing dot is optional (www.example.com and www.example.com. are
     * treated as identical).
     *
     * @param name
     *        The name of the domain.
     */
    public void setName(String name) {
        this.name = name;
    }

    /**
     * Returns the name of the domain.
     *
     * @return The name of the domain.
     */
    public String getName() {
        return this.name;
    }

    /**
     * Fluent variant of {@link #setName(String)}.
     *
     * @param name
     *        The name of the domain.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public CreateHostedZoneRequest withName(String name) {
        setName(name);
        return this;
    }

    /**
     * Sets the VPC that you want your hosted zone to be associated with. By
     * providing this parameter, your newly created hosted zone cannot be
     * resolved anywhere other than the given VPC.
     *
     * @param vPC
     *        The VPC to associate the hosted zone with.
     */
    public void setVPC(VPC vPC) {
        this.vPC = vPC;
    }

    /**
     * Returns the VPC that the hosted zone is to be associated with.
     *
     * @return The VPC to associate the hosted zone with.
     */
    public VPC getVPC() {
        return this.vPC;
    }

    /**
     * Fluent variant of {@link #setVPC(VPC)}.
     *
     * @param vPC
     *        The VPC to associate the hosted zone with.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public CreateHostedZoneRequest withVPC(VPC vPC) {
        setVPC(vPC);
        return this;
    }

    /**
     * Sets the unique string that identifies the request and allows failed
     * <code>CreateHostedZone</code> requests to be retried without the risk of
     * executing the operation twice. Valid characters are any Unicode code
     * points that are legal in an XML 1.0 document; the UTF-8 encoding must be
     * less than 128 bytes.
     *
     * @param callerReference
     *        A unique string identifying the request, such as
     *        <code>DNSMigration_01</code>.
     */
    public void setCallerReference(String callerReference) {
        this.callerReference = callerReference;
    }

    /**
     * Returns the unique string that identifies the request.
     *
     * @return A unique string identifying the request.
     */
    public String getCallerReference() {
        return this.callerReference;
    }

    /**
     * Fluent variant of {@link #setCallerReference(String)}.
     *
     * @param callerReference
     *        A unique string identifying the request.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public CreateHostedZoneRequest withCallerReference(String callerReference) {
        setCallerReference(callerReference);
        return this;
    }

    /**
     * Sets the complex type that contains an optional comment about your
     * hosted zone.
     *
     * @param hostedZoneConfig
     *        A complex type that contains an optional comment about your
     *        hosted zone.
     */
    public void setHostedZoneConfig(HostedZoneConfig hostedZoneConfig) {
        this.hostedZoneConfig = hostedZoneConfig;
    }

    /**
     * Returns the complex type that contains an optional comment about your
     * hosted zone.
     *
     * @return A complex type that contains an optional comment about your
     *         hosted zone.
     */
    public HostedZoneConfig getHostedZoneConfig() {
        return this.hostedZoneConfig;
    }

    /**
     * Fluent variant of {@link #setHostedZoneConfig(HostedZoneConfig)}.
     *
     * @param hostedZoneConfig
     *        A complex type that contains an optional comment about your
     *        hosted zone.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public CreateHostedZoneRequest withHostedZoneConfig(
            HostedZoneConfig hostedZoneConfig) {
        setHostedZoneConfig(hostedZoneConfig);
        return this;
    }

    /**
     * Sets the delegation set id of the reusable delegation set whose NS
     * records you want to assign to the new hosted zone.
     *
     * @param delegationSetId
     *        The delegation set id of the reusable delegation set.
     */
    public void setDelegationSetId(String delegationSetId) {
        this.delegationSetId = delegationSetId;
    }

    /**
     * Returns the delegation set id of the reusable delegation set.
     *
     * @return The delegation set id of the reusable delegation set.
     */
    public String getDelegationSetId() {
        return this.delegationSetId;
    }

    /**
     * Fluent variant of {@link #setDelegationSetId(String)}.
     *
     * @param delegationSetId
     *        The delegation set id of the reusable delegation set.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public CreateHostedZoneRequest withDelegationSetId(String delegationSetId) {
        setDelegationSetId(delegationSetId);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Field order and the original formatting quirk (a trailing comma may
        // remain before '}' when DelegationSetId is null) are preserved so
        // the output format is unchanged.
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getName() != null)
            sb.append("Name: " + getName() + ",");
        if (getVPC() != null)
            sb.append("VPC: " + getVPC() + ",");
        if (getCallerReference() != null)
            sb.append("CallerReference: " + getCallerReference() + ",");
        if (getHostedZoneConfig() != null)
            sb.append("HostedZoneConfig: " + getHostedZoneConfig() + ",");
        if (getDelegationSetId() != null)
            sb.append("DelegationSetId: " + getDelegationSetId());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof CreateHostedZoneRequest))
            return false;
        CreateHostedZoneRequest other = (CreateHostedZoneRequest) obj;
        return Objects.equals(getName(), other.getName())
                && Objects.equals(getVPC(), other.getVPC())
                && Objects.equals(getCallerReference(), other.getCallerReference())
                && Objects.equals(getHostedZoneConfig(), other.getHostedZoneConfig())
                && Objects.equals(getDelegationSetId(), other.getDelegationSetId());
    }

    @Override
    public int hashCode() {
        // Objects.hash applies the same 31-based accumulation (starting at 1)
        // over the same field order as the previous hand-rolled version, so
        // hash values are unchanged.
        return Objects.hash(getName(), getVPC(), getCallerReference(),
                getHostedZoneConfig(), getDelegationSetId());
    }

    @Override
    public CreateHostedZoneRequest clone() {
        return (CreateHostedZoneRequest) super.clone();
    }
}
| |
/*
* Copyright 2002-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.test.context.support;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.core.annotation.MergedAnnotation;
import org.springframework.core.io.ClassPathResource;
import org.springframework.core.log.LogMessage;
import org.springframework.core.style.ToStringCreator;
import org.springframework.lang.Nullable;
import org.springframework.test.context.TestPropertySource;
import org.springframework.util.Assert;
import org.springframework.util.ClassUtils;
import org.springframework.util.ObjectUtils;
import org.springframework.util.ResourceUtils;
import org.springframework.util.StringUtils;
/**
* {@code TestPropertySourceAttributes} encapsulates attributes declared
* via {@link TestPropertySource @TestPropertySource} annotations.
*
* <p>In addition to encapsulating declared attributes,
* {@code TestPropertySourceAttributes} also enforces configuration rules.
*
* @author Sam Brannen
* @author Phillip Webb
* @since 4.1
* @see TestPropertySource
* @see MergedTestPropertySources
*/
class TestPropertySourceAttributes {

	private static final Log logger = LogFactory.getLog(TestPropertySourceAttributes.class);

	// Class on which @TestPropertySource (directly or as a meta-annotation)
	// was declared; used to resolve the default properties file and for
	// merge validation.
	private final Class<?> declaringClass;

	// Root of the merged-annotation chain; used only in diagnostic messages,
	// since it may be a composed annotation rather than @TestPropertySource
	// itself.
	private final MergedAnnotation<?> rootAnnotation;

	private final List<String> locations = new ArrayList<>();

	private final boolean inheritLocations;

	private final List<String> properties = new ArrayList<>();

	private final boolean inheritProperties;

	TestPropertySourceAttributes(MergedAnnotation<TestPropertySource> annotation) {
		this.declaringClass = declaringClass(annotation);
		this.rootAnnotation = annotation.getRoot();
		this.inheritLocations = annotation.getBoolean("inheritLocations");
		this.inheritProperties = annotation.getBoolean("inheritProperties");
		addPropertiesAndLocationsFrom(annotation);
	}

	/**
	 * Merge this {@code TestPropertySourceAttributes} instance with the
	 * supplied {@code TestPropertySourceAttributes}, asserting that the two sets
	 * of test property source attributes have identical values for the
	 * {@link TestPropertySource#inheritLocations} and
	 * {@link TestPropertySource#inheritProperties} flags and that the two
	 * underlying annotations were declared on the same class.
	 * <p>The supplied attributes' locations and properties are inserted
	 * <em>before</em> the ones already stored in this instance.
	 * @since 5.2
	 */
	void mergeWith(TestPropertySourceAttributes attributes) {
		Assert.state(attributes.declaringClass == this.declaringClass,
				() -> "Detected @TestPropertySource declarations within an aggregate index "
				+ "with different sources: " + this.declaringClass.getName() + " and "
				+ attributes.declaringClass.getName());
		logger.trace(LogMessage.format("Retrieved %s for declaring class [%s].",
				attributes, this.declaringClass.getName()));
		assertSameBooleanAttribute(this.inheritLocations, attributes.inheritLocations,
				"inheritLocations", attributes);
		assertSameBooleanAttribute(this.inheritProperties, attributes.inheritProperties,
				"inheritProperties", attributes);
		mergePropertiesAndLocationsFrom(attributes);
	}

	// Fails with a descriptive message (naming both declaring classes and the
	// offending attribute) when the two declarations disagree on a boolean flag.
	private void assertSameBooleanAttribute(boolean expected, boolean actual,
			String attributeName, TestPropertySourceAttributes that) {
		Assert.isTrue(expected == actual, () -> String.format(
				"@%s on %s and @%s on %s must declare the same value for '%s' as other " +
				"directly present or meta-present @TestPropertySource annotations",
				this.rootAnnotation.getType().getSimpleName(), this.declaringClass.getSimpleName(),
				that.rootAnnotation.getType().getSimpleName(), that.declaringClass.getSimpleName(),
				attributeName));
	}

	// Appends the annotation's declared locations and properties to this
	// instance (prepend == false: declarations processed later go last).
	private void addPropertiesAndLocationsFrom(MergedAnnotation<TestPropertySource> mergedAnnotation) {
		String[] locations = mergedAnnotation.getStringArray("locations");
		String[] properties = mergedAnnotation.getStringArray("properties");
		addPropertiesAndLocations(locations, properties, declaringClass(mergedAnnotation), false);
	}

	// Prepends the other instance's locations and properties to this one
	// (prepend == true), preserving their relative order.
	private void mergePropertiesAndLocationsFrom(TestPropertySourceAttributes attributes) {
		addPropertiesAndLocations(attributes.getLocations(), attributes.getProperties(),
				attributes.getDeclaringClass(), true);
	}

	private void addPropertiesAndLocations(String[] locations, String[] properties,
			Class<?> declaringClass, boolean prepend) {
		// A declaration with neither locations nor properties means "use the
		// default properties file derived from the declaring class's name".
		if (ObjectUtils.isEmpty(locations) && ObjectUtils.isEmpty(properties)) {
			addAll(prepend, this.locations, detectDefaultPropertiesFile(declaringClass));
		}
		else {
			addAll(prepend, this.locations, locations);
			addAll(prepend, this.properties, properties);
		}
	}

	/**
	 * Add all of the supplied elements to the provided list, honoring the
	 * {@code prepend} flag.
	 * <p>If the {@code prepend} flag is {@code false}, the elements will be
	 * appended to the list.
	 * @param prepend whether the elements should be prepended to the list
	 * @param list the list to which to add the elements
	 * @param elements the elements to add to the list
	 */
	private void addAll(boolean prepend, List<String> list, String... elements) {
		list.addAll((prepend ? 0 : list.size()), Arrays.asList(elements));
	}

	// Resolves "<test class name>.properties" on the classpath; fails fast
	// with a logged error if the default file does not exist.
	private String detectDefaultPropertiesFile(Class<?> testClass) {
		String resourcePath = ClassUtils.convertClassNameToResourcePath(testClass.getName()) + ".properties";
		ClassPathResource classPathResource = new ClassPathResource(resourcePath);
		if (!classPathResource.exists()) {
			String msg = String.format(
					"Could not detect default properties file for test class [%s]: " +
					"%s does not exist. Either declare the 'locations' or 'properties' attributes " +
					"of @TestPropertySource or make the default properties file available.",
					testClass.getName(), classPathResource);
			logger.error(msg);
			throw new IllegalStateException(msg);
		}
		String prefixedResourcePath = ResourceUtils.CLASSPATH_URL_PREFIX + resourcePath;
		if (logger.isInfoEnabled()) {
			logger.info(String.format("Detected default properties file \"%s\" for test class [%s]",
					prefixedResourcePath, testClass.getName()));
		}
		return prefixedResourcePath;
	}

	/**
	 * Get the {@linkplain Class class} that declared {@code @TestPropertySource}.
	 * @return the declaring class; never {@code null}
	 */
	Class<?> getDeclaringClass() {
		return this.declaringClass;
	}

	/**
	 * Get the resource locations that were declared via {@code @TestPropertySource}.
	 * <p>Note: The returned value may represent a <em>detected default</em>
	 * or merged locations that do not match the original value declared via a
	 * single {@code @TestPropertySource} annotation.
	 * @return the resource locations; potentially <em>empty</em>
	 * @see TestPropertySource#value
	 * @see TestPropertySource#locations
	 */
	String[] getLocations() {
		return StringUtils.toStringArray(this.locations);
	}

	/**
	 * Get the {@code inheritLocations} flag that was declared via {@code @TestPropertySource}.
	 * @return the {@code inheritLocations} flag
	 * @see TestPropertySource#inheritLocations
	 */
	boolean isInheritLocations() {
		return this.inheritLocations;
	}

	/**
	 * Get the inlined properties that were declared via {@code @TestPropertySource}.
	 * <p>Note: The returned value may represent merged properties that do not
	 * match the original value declared via a single {@code @TestPropertySource}
	 * annotation.
	 * @return the inlined properties; potentially <em>empty</em>
	 * @see TestPropertySource#properties
	 */
	String[] getProperties() {
		return StringUtils.toStringArray(this.properties);
	}

	/**
	 * Get the {@code inheritProperties} flag that was declared via {@code @TestPropertySource}.
	 * @return the {@code inheritProperties} flag
	 * @see TestPropertySource#inheritProperties
	 */
	boolean isInheritProperties() {
		return this.inheritProperties;
	}

	// True when no locations or properties (not even a detected default)
	// were collected.
	boolean isEmpty() {
		return (this.locations.isEmpty() && this.properties.isEmpty());
	}

	// Equality intentionally ignores declaringClass and rootAnnotation: two
	// instances are equal when they contribute the same property sources.
	@Override
	public boolean equals(@Nullable Object other) {
		if (this == other) {
			return true;
		}
		if (other == null || other.getClass() != getClass()) {
			return false;
		}
		TestPropertySourceAttributes that = (TestPropertySourceAttributes) other;
		if (!this.locations.equals(that.locations)) {
			return false;
		}
		if (!this.properties.equals(that.properties)) {
			return false;
		}
		if (this.inheritLocations != that.inheritLocations) {
			return false;
		}
		if (this.inheritProperties != that.inheritProperties) {
			return false;
		}
		return true;
	}

	@Override
	public int hashCode() {
		int result = this.locations.hashCode();
		result = 31 * result + this.properties.hashCode();
		result = 31 * result + (this.inheritLocations ? 1231 : 1237);
		result = 31 * result + (this.inheritProperties ? 1231 : 1237);
		return result;
	}

	/**
	 * Provide a String representation of the {@code @TestPropertySource}
	 * attributes and declaring class.
	 */
	@Override
	public String toString() {
		return new ToStringCreator(this)
				.append("declaringClass", this.declaringClass.getName())
				.append("locations", this.locations)
				.append("inheritLocations", this.inheritLocations)
				.append("properties", this.properties)
				.append("inheritProperties", this.inheritProperties)
				.toString();
	}

	// The merged annotation's source is the class it was (meta-)declared on;
	// a non-Class source indicates a programming error upstream.
	private static Class<?> declaringClass(MergedAnnotation<?> mergedAnnotation) {
		Object source = mergedAnnotation.getSource();
		Assert.state(source instanceof Class, "No source class available");
		return (Class<?>) source;
	}
}
| |
package com.cafe;
import android.app.ActionBar;
import android.app.Activity;
import android.net.Uri;
import android.os.Bundle;
import android.app.ActionBar;
import android.app.ActionBar.Tab;
import android.app.Fragment;
import android.app.FragmentTransaction;
import android.content.Context;
import android.content.Intent;
import android.graphics.Color;
import android.graphics.drawable.ColorDrawable;
import android.os.Bundle;
import android.support.v4.app.FragmentActivity;
import android.support.v4.app.FragmentManager;
import android.support.v4.view.ViewPager;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.widget.AdapterView;
import android.widget.EditText;
import android.widget.TabHost.OnTabChangeListener;
import android.view.Menu;
import android.view.MenuItem;
public class Home extends FragmentActivity implements ActionBar.TabListener {
// Pager that hosts the tab content fragments.
private ViewPager viewPager;
// Adapter supplying one fragment per tab position.
private TabsPagerAdapter mAdapter;
// Cached action bar used for tab navigation (assigned in initComponent()).
private ActionBar actionBar;
// Tab titles
// NOTE(review): these titles are not used by addingFragement(), which
// hard-codes "TimeStart"/"Active"/"Actual" — confirm which set is current.
private String[] tabs = { "Income", "Deductions", "Taxes Paid"};
// Static self-reference so other components can reach this screen.
public static Home screen ;
Context context;
View view;
/**
 * Android lifecycle entry point: inflates the layout and delegates all UI
 * wiring to {@code initComponent()}. Statement order is fixed by the
 * framework contract — super.onCreate() first, and setContentView() before
 * any findViewById() performed inside initComponent().
 */
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.home);
    initComponent();
}
/**
 * Wires up the action bar, the ViewPager and the navigation tabs.
 *
 * <p>The action bar is fetched once and cached in {@link #actionBar} before
 * any use (previously it was fetched three separate times, with the field
 * assigned only after two direct getActionBar() calls); dead commented-out
 * tab-setup code has been removed.
 */
private void initComponent()
{
    screen = this;
    context = getApplicationContext();

    // Cache the action bar once. NOTE(review): getActionBar() can return
    // null for themes without an action bar — confirm the theme provides one.
    actionBar = getActionBar();
    actionBar.setTitle("");
    actionBar.setBackgroundDrawable(new ColorDrawable(Color.parseColor("#ffffff")));
    actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_TABS);

    viewPager = (ViewPager) findViewById(R.id.pager);
    mAdapter = new TabsPagerAdapter(getSupportFragmentManager());
    viewPager.setAdapter(mAdapter);

    addingFragement();
    initPageScroll();
}
/**
 * Populates the action bar with the three navigation tabs
 * ("TimeStart", "Active", "Actual") at positions 0-2; the middle tab
 * ("Active") starts selected, matching the pager's initial page.
 */
private void addingFragement()
{
    final String[] titles = { "TimeStart", "Active", "Actual" };
    final int initiallySelected = 1; // "Active"
    for (int position = 0; position < titles.length; position++) {
        Tab tab = actionBar.newTab()
                .setText(titles[position])
                .setTabListener(this);
        actionBar.addTab(tab, position, position == initiallySelected);
    }
}
private void initPageScroll()
{
viewPager.setOnPageChangeListener(new ViewPager.OnPageChangeListener() {
@Override
public void onPageSelected(int position) {
// on changing the page
// make respected tab selected
String hasValues = "";
actionBar.setSelectedNavigationItem(position);
viewPager.setCurrentItem(position);
/*if(position == 0)
{
inc_fragment = new Incomefragment();
hasValues = inc_fragment.checkFieldText(context);
boolean isValid;
if(validate1.matches("false")&&validate12.matches("false")&&validate13.matches("false")){
isValid = false;
}
else
//System.out.println("validate1"+validate1 +"validate12"+validate12+"validate13"+validate13);
if(validate1.matches("")&&validate12.matches("")&&validate13.matches(""))
isValid = false;
else
isValid = true;
if(hasValues.matches(""))
isValid = false;
else
isValid = true;
int currentPosition = 0;
// <-- here, you need to check yourself valid or not
if (!isValid) {
actionBar.setSelectedNavigationItem(currentPosition);
viewPager.setCurrentItem(currentPosition);
inc_fragment.inputValidation(context);
// MessageDialog.showMessage("Alert", "Income is mandatory.", ITRdata.screen);
}else{
actionBar.setSelectedNavigationItem(position);
viewPager.setCurrentItem(position);
inc_fragment.inputValidation(context);
//inc_fragment.saveToDataBase(context);
currentPosition = position;
}
}
else if(position == 1)
{
actionBar.setSelectedNavigationItem(currentPosition);
viewPager.setCurrentItem(currentPosition);
}
else if(position == 2)
{
actionBar.setSelectedNavigationItem(position);
viewPager.setCurrentItem(position);
ded_fragment = new Deductionsfragment();
ded_fragment.inputValidation(context);
boolean isValid;
if(validate2.matches("false")){
isValid = false;
}
else if(validate2.matches(""))
isValid = false;
else
isValid = true;
int currentPosition = 0;
// <-- here, you need to check yourself valid or not
if (!isValid) {
//viewPager.setCurrentItem(position);
//ded_fragment.inputValidation(context);
}else{
//viewPager.setCurrentItem(position);
//ded_fragment.inputValidation(context);
//inc_fragment.saveToDataBase(context);
}
}
else
{
actionBar.setSelectedNavigationItem(position);
viewPager.setCurrentItem(position);
}*/
//}
}
@Override
public void onPageScrolled(int pos, float arg1, int arg2) {
}
@Override
public void onPageScrollStateChanged(int pos) {
}
});
}
@Override
public void onTabSelected(Tab tab, FragmentTransaction ft) {
// TODO Auto-generated method stub
viewPager.setCurrentItem(tab.getPosition());
}
@Override
public void onTabUnselected(Tab tab, FragmentTransaction ft) {
// TODO Auto-generated method stub
}
@Override
public void onTabReselected(Tab tab, FragmentTransaction ft) {
// TODO Auto-generated method stub
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
MenuInflater inflater = getMenuInflater();
inflater.inflate(R.menu.menu, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case R.id.settings:
openSettings();
//Toast.makeText(getApplicationContext(), "Share this awesome app to your friends!", Toast.LENGTH_SHORT).show();
return true;
default:
return super.onOptionsItemSelected(item);
}
}
private void openSettings()
{
Intent settingIntent = new Intent(getApplicationContext(),UserSetting.class);
overridePendingTransition(R.anim.fade_in, R.anim.fade_out);
startActivity(settingIntent);
}
public void shareApp()
{
String urlToshare = "https://play.google.com/store/apps/details?id=com.cafe";
Uri uri = Uri.parse(urlToshare);
String shareBody = "Hey,\nI just downloaded Cafe on my Android Mobile.Cafe make quick and convenient like never before.";
Intent shareApp = new Intent(android.content.Intent.ACTION_SEND);
shareApp.setType("text/plain");
// now adding message to be shared
//shareApp.putExtra(Intent.EXTRA_SUBJECT, shareBody);
shareApp.putExtra(Intent.EXTRA_TEXT, shareBody+" Click to install "+ uri);
startActivity(Intent.createChooser(shareApp, "Share Cafe."));
}
}
| |
/*
* Copyright 2010-2012 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.cloudsearch.model;
/**
* <p>
* Specifies how to map source attribute values to custom values when populating an <code>IndexField</code> .
* </p>
*/
public class SourceDataMap {
/**
* The name of the document source field to add to this
* <code>IndexField</code>.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>1 - 64<br/>
* <b>Pattern: </b>[a-z][a-z0-9_]*<br/>
*/
private String sourceName;
/**
* The value of a field or source document attribute.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>0 - 1024<br/>
*/
private String defaultValue;
/**
* A map that translates source field values to custom values.
*/
private java.util.Map<String,String> cases;
/**
* The name of the document source field to add to this
* <code>IndexField</code>.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>1 - 64<br/>
* <b>Pattern: </b>[a-z][a-z0-9_]*<br/>
*
* @return The name of the document source field to add to this
* <code>IndexField</code>.
*/
public String getSourceName() {
return sourceName;
}
/**
* The name of the document source field to add to this
* <code>IndexField</code>.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>1 - 64<br/>
* <b>Pattern: </b>[a-z][a-z0-9_]*<br/>
*
* @param sourceName The name of the document source field to add to this
* <code>IndexField</code>.
*/
public void setSourceName(String sourceName) {
this.sourceName = sourceName;
}
/**
* The name of the document source field to add to this
* <code>IndexField</code>.
* <p>
* Returns a reference to this object so that method calls can be chained together.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>1 - 64<br/>
* <b>Pattern: </b>[a-z][a-z0-9_]*<br/>
*
* @param sourceName The name of the document source field to add to this
* <code>IndexField</code>.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public SourceDataMap withSourceName(String sourceName) {
this.sourceName = sourceName;
return this;
}
/**
* The value of a field or source document attribute.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>0 - 1024<br/>
*
* @return The value of a field or source document attribute.
*/
public String getDefaultValue() {
return defaultValue;
}
/**
* The value of a field or source document attribute.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>0 - 1024<br/>
*
* @param defaultValue The value of a field or source document attribute.
*/
public void setDefaultValue(String defaultValue) {
this.defaultValue = defaultValue;
}
/**
* The value of a field or source document attribute.
* <p>
* Returns a reference to this object so that method calls can be chained together.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>0 - 1024<br/>
*
* @param defaultValue The value of a field or source document attribute.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public SourceDataMap withDefaultValue(String defaultValue) {
this.defaultValue = defaultValue;
return this;
}
/**
* A map that translates source field values to custom values.
*
* @return A map that translates source field values to custom values.
*/
public java.util.Map<String,String> getCases() {
if (cases == null) {
cases = new java.util.HashMap<String,String>();
}
return cases;
}
/**
* A map that translates source field values to custom values.
*
* @param cases A map that translates source field values to custom values.
*/
public void setCases(java.util.Map<String,String> cases) {
this.cases = cases;
}
/**
* A map that translates source field values to custom values.
* <p>
* Returns a reference to this object so that method calls can be chained together.
*
* @param cases A map that translates source field values to custom values.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public SourceDataMap withCases(java.util.Map<String,String> cases) {
setCases(cases);
return this;
}
/**
* Returns a string representation of this object; useful for testing and
* debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (sourceName != null) sb.append("SourceName: " + sourceName + ", ");
if (defaultValue != null) sb.append("DefaultValue: " + defaultValue + ", ");
if (cases != null) sb.append("Cases: " + cases + ", ");
sb.append("}");
return sb.toString();
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getSourceName() == null) ? 0 : getSourceName().hashCode());
hashCode = prime * hashCode + ((getDefaultValue() == null) ? 0 : getDefaultValue().hashCode());
hashCode = prime * hashCode + ((getCases() == null) ? 0 : getCases().hashCode());
return hashCode;
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (obj == null) return false;
if (obj instanceof SourceDataMap == false) return false;
SourceDataMap other = (SourceDataMap)obj;
if (other.getSourceName() == null ^ this.getSourceName() == null) return false;
if (other.getSourceName() != null && other.getSourceName().equals(this.getSourceName()) == false) return false;
if (other.getDefaultValue() == null ^ this.getDefaultValue() == null) return false;
if (other.getDefaultValue() != null && other.getDefaultValue().equals(this.getDefaultValue()) == false) return false;
if (other.getCases() == null ^ this.getCases() == null) return false;
if (other.getCases() != null && other.getCases().equals(this.getCases()) == false) return false;
return true;
}
}
| |
package mogp;
import java.util.Set;
import java.util.HashSet;
import java.util.HashMap;
import java.util.List;
import java.util.ArrayList;
import java.util.Map.Entry;
import java.util.Collections;
/**
* DominationMaintenance - maintenance approach using domination.
*
* @author Jonathan Fieldsend
* @version 1.1
*/
public class DominationMaintenance extends StandardMaintenance
{
    /** Estimated Pareto set: mutually non-dominating members of the search population. */
    final Set<ArraySolution> nondominatedSet = new HashSet<>();
    /** Members of the search population dominated by at least one Pareto-set member. */
    final Set<ArraySolution> dominatedSet = new HashSet<>();

    /**
     * Constructor of maintenance object
     *
     * @param problem problem to be optimised
     * @param parameters algorithm parameters
     * @param type type of minimisation (e.g. standard or parsimonious)
     */
    DominationMaintenance(Problem problem, Parameters parameters, MinimisationType type) {
        super(problem, parameters,type);
    }

    /**
     * {@inheritDoc}
     *
     * Removal is biased away from the elite: while the Pareto set is below the
     * population limit, only dominated members enter the tournament.
     */
    @Override
    public ArraySolution negativeTournament(HashMap<Integer, ArraySolution> pop) {
        if (getParetoSetSize() >= parameters.POPULATION_SIZE){
            // Pareto set alone fills the population -- fall back to the standard
            // tournament over everything, keeping the internal sets in sync.
            ArraySolution i = super.negativeTournament(pop);
            nondominatedSet.remove(i);
            return i;
        }
        ArraySolution worst = sampleRandomExcludingElite(), competitor;
        for (int i = 1; i < parameters.TOURNAMENT_SIZE; i ++ ) {
            competitor = sampleRandomExcludingElite();
            if ( competitor.getSumOfTestsFailed() > worst.getSumOfTestsFailed() ) {
                worst = competitor;
            } else if (type.equals(MinimisationType.PARSIMONIOUS)) {
                // Parsimonious mode tie-breaks equal fitness on solution size.
                if (competitor.getSumOfTestsFailed() == worst.getSumOfTestsFailed() ) {
                    if (competitor.size() > worst.size()){
                        worst = competitor;
                    }
                }
            }
        }
        dominatedSet.remove(worst);
        return worst;
    }

    /**
     * {@inheritDoc}
     *
     * Key-returning variant of {@link #negativeTournament(HashMap)}.
     */
    @Override
    public int negativeTournamentKey(HashMap<Integer, ArraySolution> pop) {
        if (getParetoSetSize() >= parameters.POPULATION_SIZE){
            int i = super.negativeTournamentKey(pop);
            nondominatedSet.remove(pop.get(i));
            return i;
        }
        int worst = sampleRandomIndexExcludingElite(pop), competitor;
        for (int i = 1; i < parameters.TOURNAMENT_SIZE; i ++ ) {
            competitor = sampleRandomIndexExcludingElite(pop);
            if ( pop.get(competitor).getSumOfTestsFailed() > pop.get(worst).getSumOfTestsFailed() ) {
                worst = competitor;
            } else if (type.equals(MinimisationType.PARSIMONIOUS)) {
                if (pop.get(competitor).getSumOfTestsFailed() == pop.get(worst).getSumOfTestsFailed() ) {
                    if (pop.get(competitor).size() > pop.get(worst).size()){
                        worst = competitor;
                    }
                }
            }
        }
        dominatedSet.remove(pop.get(worst));
        return worst;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void evaluateFitness(HashMap<Integer, ArraySolution> pop, ArraySolution s) {
        super.evaluateFitness(pop,s);
        updateParetoSet(pop,s);
    }

    /*
     * Ensure estimated Pareto set is made up of mutually non-dominating solutions,
     * now that solution s has changed. Timing calls track time spent in this update.
     */
    private void updateParetoSet(HashMap<Integer, ArraySolution> pop, ArraySolution s){
        Timing.setStartTime(); // put in to track time spent in update
        if (!setWeakDominates(s)){
            addToParetoSet(s);
        } else {
            dominatedSet.add(s);
        }
        Timing.setEndTime();
        Timing.updateAccruedTime();
        Timing.incrementCalls();
    }

    /*
     * Method returns true if solution s is weakly dominated by the current
     * Pareto-set members.
     */
    private boolean setWeakDominates(ArraySolution s){
        if (type.equals(MinimisationType.STANDARD)){
            for (ArraySolution i : nondominatedSet)
                if (weakDominates( i.getTestsPassed(), s.getTestsPassed() ))
                    return true;
        } else {
            for (ArraySolution i : nondominatedSet)
                if (weakDominates( i.getTestsPassed(), s.getTestsPassed(), i.size(), s.size() ))
                    return true;
        }
        return false;
    }

    /*
     * Adds s to the Pareto set, demoting to dominatedSet any members it now
     * weakly dominates.
     */
    private void addToParetoSet(ArraySolution s) {
        Set<ArraySolution> remove = new HashSet<>();
        for (ArraySolution i : nondominatedSet) // mark any now dominated in set
            if (weakDominates( s.getTestsPassed(), i.getTestsPassed() ))
                remove.add(i); // mark i for removal
        // remove any now dominated members, and move to dominated set
        nondominatedSet.removeAll(remove);
        dominatedSet.addAll(remove);
        // add new entrant to Pareto set
        nondominatedSet.add(s);
    }

    /*
     * Parsimonious weak domination: a weakly dominates b on the test criteria;
     * when equal on all criteria, the shorter (or equal length) solution wins.
     */
    private boolean weakDominates(boolean[] a, boolean[] b, int lengthA, int lengthB){
        for (int i = 0; i<a.length; i++)
            if ((!a[i]) && (b[i])) // if b[i] is true, but a[i] isn't, then a can't dominate
                return false;
        for (int i = 0; i<a.length; i++)
            if (a[i] != b[i])
                return true; // weakly dominates but not equal, so no size check needed
        // solutions are equal on all criteria, so check size
        if (lengthA > lengthB)
            return false;
        return true;
    }

    /*
     * Standard weak domination on the test-pass vectors.
     */
    private boolean weakDominates(boolean[] a, boolean[] b){
        for (int i = 0; i<a.length; i++)
            if ((!a[i]) && (b[i]))
                return false;
        return true;
    }

    /*
     * Samples uniformly the population key of a random dominated solution.
     */
    private int sampleRandomIndexExcludingElite(HashMap<Integer, ArraySolution> pop) {
        int index = RandomNumberGenerator.getRandom().nextInt(dominatedSet.size());
        int i = 0;
        ArraySolution solution = null;
        for (ArraySolution a : dominatedSet) {
            if (i == index){
                solution = a;
                break;
            }
            i++;
        }
        int sampleIndex = -1;
        // now find the corresponding index of the solution in pop
        for (Entry<Integer, ArraySolution> e : pop.entrySet()){
            if (e.getValue() == solution){
                sampleIndex = e.getKey();
                break;
            }
        }
        if (sampleIndex==-1)
            // NOTE(review): returning -1 here will NPE later in pop.get(-1);
            // consider throwing IllegalStateException instead of printing.
            System.out.println("err in samp rand exc elite");
        return sampleIndex;
    }

    /*
     * Samples uniformly a random dominated solution. Fixed to draw from the
     * project-wide RandomNumberGenerator: the previous implementation shuffled
     * a copy of the whole set with Collections.shuffle's own RNG, which broke
     * seeded reproducibility and cost O(n) work per sample.
     */
    private ArraySolution sampleRandomExcludingElite() {
        int index = RandomNumberGenerator.getRandom().nextInt(dominatedSet.size());
        int i = 0;
        for (ArraySolution a : dominatedSet) {
            if (i == index)
                return a;
            i++;
        }
        throw new IllegalStateException("dominatedSet unexpectedly empty");
    }

    /**
     * {@inheritDoc}
     *
     * Truncates the combined parent+child population back to the search
     * population size, preserving non-dominated members where possible.
     */
    @Override
    public void generateNextSearchPopulation(HashMap<Integer, ArraySolution> pop, HashMap<Integer, ArraySolution> children) {
        Set<ArraySolution> setOfBestSolutions = new HashSet<>();
        HashMap<Integer, ArraySolution> combinedPop = new HashMap<>(pop);
        combinedPop.putAll(children);
        // preserve nondominated where possible, and remove via negative
        // tournament selection
        if (nondominatedSet.size() <= pop.size() ){ // can preserve all non dominated
            while (maintainedSetsTotalSize() > pop.size())
                negativeTournament(combinedPop);
            setOfBestSolutions.addAll(dominatedSet);
        } else { // have to remove from dominated
            dominatedSet.clear();
            while (nondominatedSet.size() > pop.size())
                negativeTournament(combinedPop);
        }
        setOfBestSolutions.addAll(nondominatedSet);
        // setOfBestSolutions now includes parameters.POPULATION_SIZE solutions to preserve
        int i=0;
        // replace the search population
        for (ArraySolution s : setOfBestSolutions){
            pop.put(i,s);
            i++;
        }
        assert(maintainedSetsTotalSize() == pop.size()) : "Internal maintained sets do not match search population size after truncation";
    }

    /**
     * Gets the number of non-dominated members in the search population
     *
     * @return size of estimated Pareto set
     */
    public int getParetoSetSize() {
        return nondominatedSet.size();
    }

    /*
     * Total size of the two maintained sets; should equal the population size
     * after truncation.
     */
    private int maintainedSetsTotalSize() {
        return nondominatedSet.size() + dominatedSet.size();
    }
}
| |
package org.oliot.epcis.service.capture;
import java.io.IOException;
import java.io.InputStream;
import java.util.GregorianCalendar;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.xml.bind.JAXB;
import org.apache.log4j.Level;
import org.bson.BsonArray;
import org.bson.BsonDocument;
import org.bson.BsonInt64;
import org.bson.BsonString;
import org.oliot.epcis.configuration.Configuration;
import org.oliot.epcis.service.subscription.TriggerEngine;
import org.oliot.model.ale.ECReport;
import org.oliot.model.ale.ECReportGroup;
import org.oliot.model.ale.ECReportGroupList;
import org.oliot.model.ale.ECReportGroupListMember;
import org.oliot.model.ale.ECReportMemberField;
import org.oliot.model.ale.ECReports;
import org.oliot.model.ale.ECReportGroupListMemberExtension.FieldList;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.context.ServletContextAware;
import com.mongodb.client.MongoCollection;
/**
* Copyright (C) 2014 Jaewook Jack Byun
*
* This project is part of Oliot (oliot.org), pursuing the implementation of
* Electronic Product Code Information Service(EPCIS) v1.1 specification in
* EPCglobal.
* [http://www.gs1.org/gsmp/kc/epcglobal/epcis/epcis_1_1-standard-20140520.pdf]
*
*
* @author Jaewook Jack Byun, Ph.D student
*
* Korea Advanced Institute of Science and Technology (KAIST)
*
* Real-time Embedded System Laboratory(RESL)
*
* bjw0829@kaist.ac.kr, bjw0829@gmail.com
*/
/**
 * Capture endpoint that flattens every member of an incoming ALE
 * {@link ECReports} document into an EPCIS-style ObjectEvent BSON document and
 * stores it in the "ObjectEvent" MongoDB collection.
 */
@Controller
@RequestMapping("/ALECapture")
public class ALECapture implements ServletContextAware {

    @Autowired
    ServletContext servletContext;

    /**
     * HTTP entry point. Reads the ECReports XML payload from the request body,
     * optionally validates it against the ALE 1.1 XSD (when capture
     * verification is enabled), then hands it to {@link #capture}.
     *
     * Responds 400 when verification is on and the document fails validation.
     */
    @RequestMapping
    public void post(HttpServletRequest request, HttpServletResponse response) {
        try {
            Configuration.logger.info(" ECReport Capture Started.... ");
            // Identify the requested event type.
            // NOTE(review): eventType is read here but capture() below always writes
            // to the "ObjectEvent" collection regardless -- confirm whether other
            // event types were ever meant to be supported.
            String eventType = request.getParameter("eventType");
            // Default Event Type
            if (eventType == null)
                eventType = "ObjectEvent";
            // Get ECReport
            InputStream is = request.getInputStream();
            ECReports ecReports;
            if (Configuration.isCaptureVerfificationOn == true) {
                // The request stream can only be consumed once, so buffer the
                // document as a string and re-open it separately for validation
                // and for unmarshalling.
                String xmlDocumentString = CaptureUtil.getXMLDocumentString(is);
                InputStream validateStream = CaptureUtil.getXMLDocumentInputStream(xmlDocumentString);
                boolean isValidated = CaptureUtil.validate(validateStream,
                        Configuration.wsdlPath + "/EPCglobal-ale-1_1-ale.xsd");
                if (isValidated == false) {
                    Configuration.logger.info(" ECReport : Verification Failed ");
                    response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
                    return;
                }
                InputStream ecReportStream = CaptureUtil.getXMLDocumentInputStream(xmlDocumentString);
                Configuration.logger.info(" ECReport : Validated ");
                ecReports = JAXB.unmarshal(ecReportStream, ECReports.class);
            } else {
                ecReports = JAXB.unmarshal(is, ECReports.class);
            }
            capture(ecReports, request);
        } catch (IOException e) {
            Configuration.logger.log(Level.ERROR, e.toString());
        }
    }

    @Override
    public void setServletContext(ServletContext servletContext) {
        this.servletContext = servletContext;
    }

    /**
     * Converts every group-list member of the given reports into one BSON
     * ObjectEvent document and inserts it into MongoDB. Event metadata
     * (action, bizStep, disposition, readPoint, bizLocation, time-zone) is
     * taken from request parameters; member fields of the form
     * "value^type" are coerced to the named primitive type.
     *
     * @param ecReports unmarshalled ALE report document
     * @param request   originating HTTP request (supplies event metadata)
     * @return always true (failures surface as logged exceptions upstream)
     */
    private boolean capture(ECReports ecReports, HttpServletRequest request) {
        // Event Time in timemillis , type long
        long eventTime = CaptureUtil.getEventTime(ecReports).toGregorianCalendar().getTimeInMillis();
        // Event Time Zone
        String eventTimeZoneOffset = request.getParameter("eventTimeZoneOffset");
        // Record Time : according to M5
        GregorianCalendar recordTime = new GregorianCalendar();
        long recordTimeMillis = recordTime.getTimeInMillis();
        // Action
        String action = request.getParameter("action");
        // Biz Step
        String bizStep = request.getParameter("bizStep");
        // Disposition
        String disposition = request.getParameter("disposition");
        // Read Point
        String readPoint = request.getParameter("readPoint");
        // BizLocation
        String bizLocation = request.getParameter("bizLocation");
        List<ECReport> ecReportList = ecReports.getReports().getReport();
        for (int i = 0; i < ecReportList.size(); i++) {
            ECReport ecReport = ecReportList.get(i);
            if (ecReport.getGroup() == null)
                continue;
            List<ECReportGroup> ecReportGroups = ecReport.getGroup();
            for (int j = 0; j < ecReportGroups.size(); j++) {
                ECReportGroup ecReportGroup = ecReportGroups.get(j);
                if (ecReportGroup.getGroupList() == null)
                    continue;
                ECReportGroupList ecReportGroupList = ecReportGroup.getGroupList();
                List<ECReportGroupListMember> members = ecReportGroupList.getMember();
                for (int k = 0; k < members.size(); k++) {
                    ECReportGroupListMember member = members.get(k);
                    String epcString = member.getEpc().getValue();
                    if (member.getExtension() == null)
                        continue;
                    if (member.getExtension().getFieldList() == null)
                        continue;
                    FieldList fieldList = member.getExtension().getFieldList();
                    List<ECReportMemberField> fields = fieldList.getField();
                    // Collect extension fields; values encoded as "value^type"
                    // are parsed into the named primitive type, falling back to
                    // the raw string on parse failure.
                    Map<String, Object> extMap = new HashMap<String, Object>();
                    for (int l = 0; l < fields.size(); l++) {
                        ECReportMemberField field = fields.get(l);
                        String key = field.getName();
                        String value = field.getValue();
                        String[] valArr = value.split("\\^");
                        if (valArr.length != 2) {
                            extMap.put(key, value);
                            continue;
                        }
                        try {
                            String type = valArr[1];
                            if (type.equals("int")) {
                                extMap.put(key, Integer.parseInt(valArr[0]));
                            } else if (type.equals("long")) {
                                extMap.put(key, Long.parseLong(valArr[0]));
                            } else if (type.equals("float")) {
                                extMap.put(key, Float.parseFloat(valArr[0]));
                            } else if (type.equals("double")) {
                                extMap.put(key, Double.parseDouble(valArr[0]));
                            } else if (type.equals("boolean")) {
                                extMap.put(key, Boolean.parseBoolean(valArr[0]));
                            } else {
                                extMap.put(key, valArr[0]);
                            }
                        } catch (NumberFormatException e) {
                            extMap.put(key, valArr[0]);
                        }
                    }
                    BsonDocument dbo = new BsonDocument();
                    // EPC
                    BsonArray epcList = new BsonArray();
                    BsonDocument epc = new BsonDocument();
                    epc.put("epc", new BsonString(epcString));
                    epcList.add(epc);
                    dbo.put("epcList", epcList);
                    dbo.put("eventTime", new BsonInt64(eventTime));
                    if (eventTimeZoneOffset == null) {
                        dbo.put("eventTimeZoneOffset", new BsonString("+09:00"));
                    } else {
                        dbo.put("eventTimeZoneOffset", new BsonString(eventTimeZoneOffset));
                    }
                    dbo.put("recordTime", new BsonInt64(recordTimeMillis));
                    if (action == null) {
                        dbo.put("action", new BsonString("OBSERVE"));
                    } else {
                        dbo.put("action", new BsonString(action));
                    }
                    if (bizStep != null) {
                        dbo.put("bizStep", new BsonString(bizStep));
                    }
                    if (disposition != null) {
                        // Fixed: key was previously misspelled "dispsition",
                        // which hid the value from standard EPCIS disposition
                        // queries.
                        dbo.put("disposition", new BsonString(disposition));
                    }
                    if (readPoint != null) {
                        dbo.put("readPoint", new BsonDocument("id", new BsonString(readPoint)));
                    }
                    if (bizLocation != null) {
                        dbo.put("bizLocation", new BsonDocument("id", new BsonString(bizLocation)));
                    }
                    // Extension Field
                    if (extMap.isEmpty() == false) {
                        Iterator<String> keyIterator = extMap.keySet().iterator();
                        BsonDocument any = new BsonDocument();
                        any.put("@ale", new BsonString("http://" + request.getLocalAddr() + ":" + request.getLocalPort()
                                + request.getContextPath() + "/schema/aleCapture.xsd"));
                        while (keyIterator.hasNext()) {
                            String key = keyIterator.next();
                            Object value = extMap.get(key);
                            any.put("ale:" + key, new BsonString(value.toString()));
                        }
                        dbo.put("any", any);
                    }
                    MongoCollection<BsonDocument> collection = Configuration.mongoDatabase.getCollection("ObjectEvent",
                            BsonDocument.class);
                    if (Configuration.isTriggerSupported == true) {
                        TriggerEngine.examineAndFire("ObjectEvent", dbo);
                    }
                    collection.insertOne(dbo);
                }
            }
        }
        return true;
    }
}
| |
package com.av001.web.rest;
import com.av001.Application;
import com.av001.domain.ShippingAddress;
import com.av001.repository.ShippingAddressRepository;
import com.av001.repository.search.ShippingAddressSearchRepository;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import static org.hamcrest.Matchers.hasItem;
import org.mockito.MockitoAnnotations;
import org.springframework.boot.test.IntegrationTest;
import org.springframework.boot.test.SpringApplicationConfiguration;
import org.springframework.http.MediaType;
import org.springframework.http.converter.json.MappingJackson2HttpMessageConverter;
import org.springframework.data.web.PageableHandlerMethodArgumentResolver;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.test.context.web.WebAppConfiguration;
import org.springframework.test.util.ReflectionTestUtils;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.transaction.annotation.Transactional;
import javax.annotation.PostConstruct;
import javax.inject.Inject;
import java.util.List;
import static org.assertj.core.api.Assertions.assertThat;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.*;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*;
/**
* Test class for the ShippingAddressResource REST controller.
*
* @see ShippingAddressResource
*/
@RunWith(SpringJUnit4ClassRunner.class)
@SpringApplicationConfiguration(classes = Application.class)
@WebAppConfiguration
@IntegrationTest
public class ShippingAddressResourceTest {
private static final String DEFAULT_ADDRESS1 = "AAAAA";
private static final String UPDATED_ADDRESS1 = "BBBBB";
private static final String DEFAULT_ADDRESS2 = "AAAAA";
private static final String UPDATED_ADDRESS2 = "BBBBB";
private static final String DEFAULT_VILLE = "AAAAA";
private static final String UPDATED_VILLE = "BBBBB";
private static final String DEFAULT_CODE_POSTAL = "AAAAA";
private static final String UPDATED_CODE_POSTAL = "BBBBB";
private static final String DEFAULT_PAYS = "AAAAA";
private static final String UPDATED_PAYS = "BBBBB";
@Inject
private ShippingAddressRepository shippingAddressRepository;
@Inject
private ShippingAddressSearchRepository shippingAddressSearchRepository;
@Inject
private MappingJackson2HttpMessageConverter jacksonMessageConverter;
@Inject
private PageableHandlerMethodArgumentResolver pageableArgumentResolver;
private MockMvc restShippingAddressMockMvc;
private ShippingAddress shippingAddress;
@PostConstruct
public void setup() {
MockitoAnnotations.initMocks(this);
ShippingAddressResource shippingAddressResource = new ShippingAddressResource();
ReflectionTestUtils.setField(shippingAddressResource, "shippingAddressRepository", shippingAddressRepository);
ReflectionTestUtils.setField(shippingAddressResource, "shippingAddressSearchRepository", shippingAddressSearchRepository);
this.restShippingAddressMockMvc = MockMvcBuilders.standaloneSetup(shippingAddressResource)
.setCustomArgumentResolvers(pageableArgumentResolver)
.setMessageConverters(jacksonMessageConverter).build();
}
@Before
public void initTest() {
shippingAddress = new ShippingAddress();
shippingAddress.setAddress1(DEFAULT_ADDRESS1);
shippingAddress.setAddress2(DEFAULT_ADDRESS2);
shippingAddress.setVille(DEFAULT_VILLE);
shippingAddress.setCodePostal(DEFAULT_CODE_POSTAL);
shippingAddress.setPays(DEFAULT_PAYS);
}
@Test
@Transactional
public void createShippingAddress() throws Exception {
int databaseSizeBeforeCreate = shippingAddressRepository.findAll().size();
// Create the ShippingAddress
restShippingAddressMockMvc.perform(post("/api/shippingAddresss")
.contentType(TestUtil.APPLICATION_JSON_UTF8)
.content(TestUtil.convertObjectToJsonBytes(shippingAddress)))
.andExpect(status().isCreated());
// Validate the ShippingAddress in the database
List<ShippingAddress> shippingAddresss = shippingAddressRepository.findAll();
assertThat(shippingAddresss).hasSize(databaseSizeBeforeCreate + 1);
ShippingAddress testShippingAddress = shippingAddresss.get(shippingAddresss.size() - 1);
assertThat(testShippingAddress.getAddress1()).isEqualTo(DEFAULT_ADDRESS1);
assertThat(testShippingAddress.getAddress2()).isEqualTo(DEFAULT_ADDRESS2);
assertThat(testShippingAddress.getVille()).isEqualTo(DEFAULT_VILLE);
assertThat(testShippingAddress.getCodePostal()).isEqualTo(DEFAULT_CODE_POSTAL);
assertThat(testShippingAddress.getPays()).isEqualTo(DEFAULT_PAYS);
}
@Test
@Transactional
public void getAllShippingAddresss() throws Exception {
// Initialize the database
shippingAddressRepository.saveAndFlush(shippingAddress);
// Get all the shippingAddresss
restShippingAddressMockMvc.perform(get("/api/shippingAddresss"))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON))
.andExpect(jsonPath("$.[*].id").value(hasItem(shippingAddress.getId().intValue())))
.andExpect(jsonPath("$.[*].address1").value(hasItem(DEFAULT_ADDRESS1.toString())))
.andExpect(jsonPath("$.[*].address2").value(hasItem(DEFAULT_ADDRESS2.toString())))
.andExpect(jsonPath("$.[*].ville").value(hasItem(DEFAULT_VILLE.toString())))
.andExpect(jsonPath("$.[*].codePostal").value(hasItem(DEFAULT_CODE_POSTAL.toString())))
.andExpect(jsonPath("$.[*].pays").value(hasItem(DEFAULT_PAYS.toString())));
}
@Test
@Transactional
public void getShippingAddress() throws Exception {
// Initialize the database
shippingAddressRepository.saveAndFlush(shippingAddress);
// Get the shippingAddress
restShippingAddressMockMvc.perform(get("/api/shippingAddresss/{id}", shippingAddress.getId()))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON))
.andExpect(jsonPath("$.id").value(shippingAddress.getId().intValue()))
.andExpect(jsonPath("$.address1").value(DEFAULT_ADDRESS1.toString()))
.andExpect(jsonPath("$.address2").value(DEFAULT_ADDRESS2.toString()))
.andExpect(jsonPath("$.ville").value(DEFAULT_VILLE.toString()))
.andExpect(jsonPath("$.codePostal").value(DEFAULT_CODE_POSTAL.toString()))
.andExpect(jsonPath("$.pays").value(DEFAULT_PAYS.toString()));
}
@Test
@Transactional
public void getNonExistingShippingAddress() throws Exception {
// Get the shippingAddress
restShippingAddressMockMvc.perform(get("/api/shippingAddresss/{id}", Long.MAX_VALUE))
.andExpect(status().isNotFound());
}
@Test
@Transactional
public void updateShippingAddress() throws Exception {
// Initialize the database
shippingAddressRepository.saveAndFlush(shippingAddress);
int databaseSizeBeforeUpdate = shippingAddressRepository.findAll().size();
// Update the shippingAddress
shippingAddress.setAddress1(UPDATED_ADDRESS1);
shippingAddress.setAddress2(UPDATED_ADDRESS2);
shippingAddress.setVille(UPDATED_VILLE);
shippingAddress.setCodePostal(UPDATED_CODE_POSTAL);
shippingAddress.setPays(UPDATED_PAYS);
restShippingAddressMockMvc.perform(put("/api/shippingAddresss")
.contentType(TestUtil.APPLICATION_JSON_UTF8)
.content(TestUtil.convertObjectToJsonBytes(shippingAddress)))
.andExpect(status().isOk());
// Validate the ShippingAddress in the database
List<ShippingAddress> shippingAddresss = shippingAddressRepository.findAll();
assertThat(shippingAddresss).hasSize(databaseSizeBeforeUpdate);
ShippingAddress testShippingAddress = shippingAddresss.get(shippingAddresss.size() - 1);
assertThat(testShippingAddress.getAddress1()).isEqualTo(UPDATED_ADDRESS1);
assertThat(testShippingAddress.getAddress2()).isEqualTo(UPDATED_ADDRESS2);
assertThat(testShippingAddress.getVille()).isEqualTo(UPDATED_VILLE);
assertThat(testShippingAddress.getCodePostal()).isEqualTo(UPDATED_CODE_POSTAL);
assertThat(testShippingAddress.getPays()).isEqualTo(UPDATED_PAYS);
}
@Test
@Transactional
public void deleteShippingAddress() throws Exception {
// Initialize the database with one persisted entity.
shippingAddressRepository.saveAndFlush(shippingAddress);
int databaseSizeBeforeDelete = shippingAddressRepository.findAll().size();
// DELETE the entity; the REST layer should answer 200 OK.
restShippingAddressMockMvc.perform(delete("/api/shippingAddresss/{id}", shippingAddress.getId())
.accept(TestUtil.APPLICATION_JSON_UTF8))
.andExpect(status().isOk());
// Validate the row is gone from the database.
List<ShippingAddress> shippingAddresss = shippingAddressRepository.findAll();
assertThat(shippingAddresss).hasSize(databaseSizeBeforeDelete - 1);
}
}
| |
/*
* Copyright 2009 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import static com.google.common.base.Strings.isNullOrEmpty;
import static java.nio.charset.StandardCharsets.UTF_8;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.io.CharSource;
import com.google.common.io.CharStreams;
import com.google.common.io.Files;
import com.google.common.io.Resources;
import com.google.javascript.rhino.StaticSourceFile;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.Serializable;
import java.io.StringReader;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Enumeration;
import java.util.List;
import java.util.Objects;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
/**
* An abstract representation of a source file that provides access to language-neutral features.
* The source file can be loaded from various locations, such as from disk or from a preloaded
* string.
*
* @author nicksantos@google.com (Nick Santos)
*/
public class SourceFile implements StaticSourceFile, Serializable {
private static final long serialVersionUID = 1L;
// Byte-order mark that may prefix UTF-8 encoded files; stripped by setCode.
private static final String UTF8_BOM = "\uFEFF";
/** A JavaScript source code provider. The value should
* be cached so that the source text stays consistent throughout a single
* compile. */
public interface Generator {
public String getCode();
}
/**
* Number of lines in the region returned by {@link #getRegion(int)}.
* This length must be odd.
*/
private static final int SOURCE_EXCERPT_REGION_LENGTH = 5;
// Unique (slash-normalized) name of this source; shown in compiler warnings.
private final String fileName;
private boolean isExternFile = false;
// The fileName may not always identify the original file - for example,
// supersourced Java inputs, or Java inputs that come from Jar files. This
// is an optional field that the creator of an AST or SourceFile can set.
// It could be a path to the original file, or in case this SourceFile came
// from a Jar, it could be the path to the Jar.
private String originalPath = null;
// Source Line Information: lineOffsets[i] is the char offset of line i+1;
// computed lazily by findLineOffsets() and reset whenever the code changes.
private int[] lineOffsets = null;
// Cached source text; null until loaded (lazy subclasses load on demand).
private String code = null;
static final DiagnosticType DUPLICATE_ZIP_CONTENTS = DiagnosticType.warning(
"JSC_DUPLICATE_ZIP_CONTENTS",
"Two zip entries containing the same relative path.\n"
+ "Entry 1: {0}\n"
+ "Entry 2: {1}");
/**
 * Construct a new abstract source file.
 *
 * @param fileName The file name of the source file. It does not necessarily need to correspond to
 *     a real path. But it should be unique. Will appear in warning messages emitted by the
 *     compiler.
 */
public SourceFile(String fileName) {
  if (isNullOrEmpty(fileName)) {
    throw new IllegalArgumentException("a source must have a name");
  }
  // Normalize platform-specific separators so names are always slash-delimited.
  this.fileName =
      "/".equals(File.separator) ? fileName : fileName.replace(File.separator, "/");
}
@Override
public int getLineOffset(int lineno) {
findLineOffsets();
// Lines are 1-based; reject anything outside [1, numLines].
if (lineno < 1 || lineno > lineOffsets.length) {
throw new IllegalArgumentException(
"Expected line number between 1 and " + lineOffsets.length +
"\nActual: " + lineno);
}
return lineOffsets[lineno - 1];
}
/** @return The number of lines in this source file. */
int getNumLines() {
findLineOffsets();
return lineOffsets.length;
}
/** Lazily builds the table of per-line character offsets. */
private void findLineOffsets() {
  // Bail out if a previous call already filled the table.
  if (lineOffsets != null) {
    return;
  }
  try {
    // The -1 limit keeps trailing empty strings, so the table covers every line.
    String[] sourceLines = getCode().split("\n", -1);
    int[] offsets = new int[sourceLines.length];
    int runningOffset = 0;
    for (int i = 0; i < sourceLines.length; i++) {
      offsets[i] = runningOffset;
      runningOffset += sourceLines[i].length() + 1; // +1 for the '\n'
    }
    lineOffsets = offsets;
  } catch (IOException e) {
    // Source is unreadable: pretend it is a single line starting at offset 0.
    lineOffsets = new int[] {0};
  }
}
/** Drops the cached offset table; called whenever the cached code changes. */
private void resetLineOffsets() {
  lineOffsets = null;
}
//////////////////////////////////////////////////////////////////////////////
// Implementation
/**
* Gets all the code in this source file.
* @throws IOException if the underlying source cannot be read
*/
public String getCode() throws IOException {
// Base implementation returns the cache; lazy subclasses override this to
// load from disk/URL on first use.
return code;
}
/**
* Gets a char source for the code in this source file.
*/
@GwtIncompatible("com.google.common.io.CharSource")
public CharSource getCodeCharSource() {
return CharSource.wrap(code);
}
/**
* Gets a reader for the code in this source file.
*/
@GwtIncompatible("java.io.Reader")
public Reader getCodeReader() throws IOException {
return new StringReader(getCode());
}
@VisibleForTesting
String getCodeNoCache() {
  // Raw cache access for tests; may be null if the code was never loaded.
  return code;
}

void setCode(String sourceCode) {
  this.setCode(sourceCode, false);
}

/**
 * Replaces the cached source text, optionally stripping a leading UTF-8
 * byte-order mark, and invalidates the derived line-offset table.
 */
void setCode(String sourceCode, boolean removeUtf8Bom) {
  boolean stripBom =
      removeUtf8Bom && sourceCode != null && sourceCode.startsWith(UTF8_BOM);
  code = stripBom ? sourceCode.substring(UTF8_BOM.length()) : sourceCode;
  resetLineOffsets();
}
// Returns the path to the original source when set, else the file name.
public String getOriginalPath() {
return originalPath != null ? originalPath : fileName;
}
public void setOriginalPath(String originalPath) {
this.originalPath = originalPath;
}
// For SourceFile types which cache source code that can be regenerated
// easily, flush the cache. We maintain the cache mostly to speed up
// generating source when displaying error messages, so dumping the file
// contents after the compile is a fine thing to do.
public void clearCachedSource() {
// By default, do nothing. Not all kinds of SourceFiles can regenerate
// code.
}
// True when the source text is currently cached in memory.
boolean hasSourceInMemory() {
return code != null;
}
/** Returns a unique name for the source file. */
@Override
public String getName() {
return fileName;
}
/** Returns whether this is an extern. */
@Override
public boolean isExtern() {
return isExternFile;
}
/** Sets that this is an extern. */
void setIsExtern(boolean newVal) {
isExternFile = newVal;
}
@Override
public int getLineOfOffset(int offset) {
  findLineOffsets();
  int found = Arrays.binarySearch(lineOffsets, offset);
  if (found >= 0) {
    // Offset is exactly at a line start; lines are 1-based.
    return found + 1;
  }
  // Offset falls inside a line: binarySearch encodes the insertion point as
  // -(insertion) - 1. Recover it and step back to the containing line,
  // clamping so offsets past the last line start map to the last line.
  int insertionPoint = -(found + 1);
  return Math.min(insertionPoint - 1, lineOffsets.length - 1) + 1;
}

@Override
public int getColumnOfOffset(int offset) {
  // Column is the distance from the start of the line containing the offset.
  int line = getLineOfOffset(offset);
  return offset - lineOffsets[line - 1];
}
/**
 * Gets the source line for the indicated line number.
 *
 * @param lineNumber the line number, 1 being the first line of the file.
 * @return The line indicated, without the trailing newline. Returns
 *     {@code null} if it does not exist, or if there was an IO exception.
 */
public String getLine(int lineNumber) {
  findLineOffsets();
  if (lineNumber > lineOffsets.length) {
    return null;
  }
  if (lineNumber < 1) {
    lineNumber = 1;
  }
  int start = lineOffsets[lineNumber - 1];
  String js;
  try {
    // NOTE(nicksantos): Right now, this is optimized for few warnings.
    // This is probably the right trade-off, but will be slow if there
    // are lots of warnings in one file.
    js = getCode();
  } catch (IOException e) {
    return null;
  }
  int newline = js.indexOf('\n', start);
  if (newline != -1) {
    // Exclude the terminating newline.
    return js.substring(start, newline);
  }
  // No newline after start: either the offset is at/after the end of the
  // file (no such line), or this is the final, unterminated line.
  return start >= js.length() ? null : js.substring(start);
}
/**
* Get a region around the indicated line number. The exact definition of a
* region is implementation specific, but it must contain the line indicated
* by the line number. A region must not start or end by a carriage return.
*
* @param lineNumber the line number, 1 being the first line of the file.
* @return The line indicated. Returns {@code null} if it does not exist,
* or if there was an IO exception.
*/
public Region getRegion(int lineNumber) {
String js = "";
try {
js = getCode();
} catch (IOException e) {
return null;
}
int pos = 0;
// Center the (odd-length) excerpt around lineNumber, clamped at line 1.
int startLine = Math.max(1,
lineNumber - (SOURCE_EXCERPT_REGION_LENGTH + 1) / 2 + 1);
// Advance pos to the character offset of startLine (or EOF, if earlier).
for (int n = 1; n < startLine; n++) {
int nextpos = js.indexOf('\n', pos);
if (nextpos == -1) {
break;
}
pos = nextpos + 1;
}
// Walk forward up to SOURCE_EXCERPT_REGION_LENGTH lines; end stays -1 when
// the file runs out before the region does.
int end = pos;
int endLine = startLine;
for (int n = 0; n < SOURCE_EXCERPT_REGION_LENGTH; n++, endLine++) {
end = js.indexOf('\n', end);
if (end == -1) {
break;
}
end++;
}
// Requested line lies at or beyond the last line actually found: no region.
if (lineNumber >= endLine) {
return null;
}
if (end == -1) {
// Region runs to EOF; drop a trailing newline if one is present.
int last = js.length() - 1;
if (js.charAt(last) == '\n') {
return
new SimpleRegion(startLine, endLine, js.substring(pos, last));
} else {
return new SimpleRegion(startLine, endLine, js.substring(pos));
}
} else {
return new SimpleRegion(startLine, endLine, js.substring(pos, end));
}
}
/** Returns the file name (also the unique source name). */
@Override
public String toString() {
return fileName;
}
// Creates one lazily-loaded SourceFile per ".js" entry in the given zip.
@GwtIncompatible("java.util.zip.ZipFile")
public static List<SourceFile> fromZipFile(String zipName, Charset inputCharset)
throws IOException {
final String absoluteZipPath = new File(zipName).getAbsolutePath();
List<SourceFile> sourceFiles = new ArrayList<>();
try (ZipFile zipFile = new ZipFile(absoluteZipPath)) {
Enumeration<? extends ZipEntry> zipEntries = zipFile.entries();
while (zipEntries.hasMoreElements()) {
ZipEntry zipEntry = zipEntries.nextElement();
String entryName = zipEntry.getName();
if (!entryName.endsWith(".js")) { // Only accept js files
continue;
}
sourceFiles.add(fromZipEntry(zipName, absoluteZipPath, entryName, inputCharset));
}
}
return sourceFiles;
}
// Delimiter between an archive path and an entry path, e.g. "foo.zip!/a.js".
static final String BANG_SLASH = "!/";
static final String JAR_URL_PREFIX = "jar:file:";
// Builds a lazily-loaded SourceFile for one zip entry, addressed through a
// "jar:file:<zip>!/<entry>" URL.
@GwtIncompatible("java.net.URL")
public static SourceFile fromZipEntry(
String originalZipPath, String absoluteZipPath, String entryPath, Charset inputCharset)
throws MalformedURLException {
String zipEntryPath = JAR_URL_PREFIX + absoluteZipPath + BANG_SLASH + entryPath;
URL zipEntryUrl = new URL(zipEntryPath);
return builder()
.withCharset(inputCharset)
.withOriginalPath(originalZipPath + "!/" + entryPath)
.buildFromUrl(zipEntryUrl);
}
/**
 * Creates a SourceFile for the given path. Paths of the form
 * "foo.zip!/bar.js" are treated as an entry inside a zip archive.
 *
 * @param fileName a plain file path, or "&lt;zip&gt;!/&lt;entry&gt;".
 * @param charset charset used to decode the file's bytes.
 */
@GwtIncompatible("java.io.File")
public static SourceFile fromFile(String fileName, Charset charset) {
  // Split only at the FIRST "!/" so entry paths that themselves contain the
  // delimiter stay intact. The previous String.split(BANG_SLASH) silently
  // dropped everything after a second "!/", resolving the wrong entry.
  int bang = fileName.indexOf(BANG_SLASH);
  if (bang != -1) {
    String zipPath = fileName.substring(0, bang);
    String relativePath = fileName.substring(bang + BANG_SLASH.length());
    try {
      return fromZipEntry(zipPath, zipPath, relativePath, charset);
    } catch (MalformedURLException e) {
      throw new RuntimeException(e);
    }
  }
  return builder().withCharset(charset).buildFromFile(fileName);
}
/** Creates a lazily-loaded, file-backed SourceFile with the default charset. */
public static SourceFile fromFile(String fileName) {
return builder().buildFromFile(fileName);
}
@GwtIncompatible("java.io.File")
public static SourceFile fromFile(File file, Charset c) {
return builder().withCharset(c).buildFromFile(file);
}
@GwtIncompatible("java.io.File")
public static SourceFile fromFile(File file) {
return builder().buildFromFile(file);
}
/** Creates a SourceFile from code already held in memory. */
public static SourceFile fromCode(String fileName, String code) {
return builder().buildFromCode(fileName, code);
}
/**
* @deprecated Use {@link #fromInputStream(String, InputStream, Charset)}
*/
@Deprecated
@GwtIncompatible("java.io.InputStream")
public static SourceFile fromInputStream(String fileName, InputStream s)
throws IOException {
return builder().buildFromInputStream(fileName, s);
}
/** Eagerly reads the whole stream using the given charset. */
@GwtIncompatible("java.io.InputStream")
public static SourceFile fromInputStream(String fileName, InputStream s,
Charset charset) throws IOException {
return builder().withCharset(charset).buildFromInputStream(fileName, s);
}
@GwtIncompatible("java.io.Reader")
public static SourceFile fromReader(String fileName, Reader r)
throws IOException {
return builder().buildFromReader(fileName, r);
}
/** Creates a SourceFile whose code is produced on demand by the generator. */
public static SourceFile fromGenerator(String fileName,
Generator generator) {
return builder().buildFromGenerator(fileName, generator);
}
/** Create a new builder for source files. */
public static Builder builder() {
return new Builder();
}
/**
* A builder interface for source files.
*
* Allows users to customize the Charset, and the original path of
* the source file (if it differs from the path on disk).
*/
public static class Builder {
// Default input file format for the compiler has always been UTF-8.
private Charset charset = UTF_8;
private String originalPath = null;
public Builder() {}
/** Set the charset to use when reading from an input stream or file. */
public Builder withCharset(Charset charset) {
this.charset = charset;
return this;
}
/** Set the original path to record when it differs from the on-disk path. */
public Builder withOriginalPath(String originalPath) {
this.originalPath = originalPath;
return this;
}
public SourceFile buildFromFile(String fileName) {
return buildFromFile(new File(fileName));
}
// Lazily-loaded, file-backed source.
public SourceFile buildFromFile(File file) {
return new OnDisk(file, originalPath, charset);
}
// Lazily-loaded, URL-backed source (used for zip/jar entries).
@GwtIncompatible("java.net.URL")
public SourceFile buildFromUrl(URL url) {
return new AtUrl(url, originalPath, charset);
}
// Source whose code is already in memory.
public SourceFile buildFromCode(String fileName, String code) {
return new Preloaded(fileName, originalPath, code);
}
// Eagerly reads the whole stream using the configured charset.
@GwtIncompatible("java.io.InputStream")
public SourceFile buildFromInputStream(String fileName, InputStream s)
throws IOException {
return buildFromCode(fileName,
CharStreams.toString(new InputStreamReader(s, charset)));
}
@GwtIncompatible("java.io.Reader")
public SourceFile buildFromReader(String fileName, Reader r)
throws IOException {
return buildFromCode(fileName, CharStreams.toString(r));
}
// Source produced on demand by a Generator.
public SourceFile buildFromGenerator(String fileName,
Generator generator) {
return new Generated(fileName, originalPath, generator);
}
}
//////////////////////////////////////////////////////////////////////////////
// Implementations
/**
* A source file where the code has been preloaded.
*/
static class Preloaded extends SourceFile {
private static final long serialVersionUID = 1L;
Preloaded(String fileName, String originalPath, String code) {
super(fileName);
super.setOriginalPath(originalPath);
super.setCode(code);
}
}
/**
* A source file where the code will be dynamically generated
* from the injected interface.
*/
static class Generated extends SourceFile {
private static final long serialVersionUID = 1L;
private final Generator generator;
// Not private, so that LazyInput can extend it.
Generated(String fileName, String originalPath, Generator generator) {
super(fileName);
super.setOriginalPath(originalPath);
this.generator = generator;
}
// Generates (and caches) the code on first use; synchronized so concurrent
// callers observe a single generation.
@Override
public synchronized String getCode() throws IOException {
String cachedCode = super.getCode();
if (cachedCode == null) {
cachedCode = generator.getCode();
super.setCode(cachedCode);
}
return cachedCode;
}
// Clear out the generated code when finished with a compile; we can
// regenerate it if we ever need it again.
@Override
public void clearCachedSource() {
super.setCode(null);
}
}
/**
 * A source file where the code is only read into memory if absolutely
 * necessary. We will try to delay loading the code into memory as long as
 * possible.
 */
static class OnDisk extends SourceFile {
  private static final long serialVersionUID = 1L;
  private final File file;
  // This is stored as a String, but passed in and out as a Charset so that
  // we can serialize the class.
  // Default input file format for the compiler has always been UTF_8.
  private String inputCharset = UTF_8.name();

  OnDisk(File file, String originalPath, Charset c) {
    super(file.getPath());
    this.file = file;
    super.setOriginalPath(originalPath);
    if (c != null) {
      this.setCharset(c);
    }
  }

  @Override
  public synchronized String getCode() throws IOException {
    String cachedCode = super.getCode();
    if (cachedCode == null) {
      cachedCode = Files.toString(file, this.getCharset());
      // Strip a leading UTF-8 BOM when the configured charset is UTF-8.
      super.setCode(cachedCode, Objects.equals(this.getCharset(), StandardCharsets.UTF_8));
      // Byte Order Mark can be removed by setCode; re-read the cached value.
      cachedCode = super.getCode();
    }
    return cachedCode;
  }

  /**
   * Gets a char source for the code in this source file.
   */
  @Override
  @GwtIncompatible("Files.asCharSource()")
  public CharSource getCodeCharSource() {
    if (hasSourceInMemory()) {
      return super.getCodeCharSource();
    } else {
      // If we haven't pulled the code into memory yet, don't. Decode with the
      // configured charset so this lazy path matches getCode(); previously it
      // was hard-coded to UTF-8 and ignored setCharset().
      return Files.asCharSource(file, this.getCharset());
    }
  }

  /**
   * Gets a reader for the code in this source file.
   */
  @Override
  @GwtIncompatible("java.io.Reader")
  public Reader getCodeReader() throws IOException {
    if (hasSourceInMemory()) {
      return super.getCodeReader();
    } else {
      // If we haven't pulled the code into memory yet, don't. Use the
      // configured charset for consistency with getCode().
      return Files.newReader(file, this.getCharset());
    }
  }

  // Flush the cached code after the compile; we can read it off disk
  // if we need it again.
  @Override
  public void clearCachedSource() {
    super.setCode(null);
  }

  /**
   * Store the Charset specification as the string version of the name,
   * rather than the Charset itself. This allows us to serialize the
   * SourceFile class.
   * @param c charset to use when reading the input.
   */
  public void setCharset(Charset c) {
    inputCharset = c.name();
  }

  /**
   * Get the Charset specifying how we're supposed to read the file
   * in off disk and into UTF-16. This is stored as a string to allow
   * SourceFile to be serialized.
   * @return Charset object representing charset to use.
   */
  public Charset getCharset() {
    return Charset.forName(inputCharset);
  }
}
/**
 * A source file at a URL where the code is only read into memory if absolutely
 * necessary. We will try to delay loading the code into memory as long as
 * possible.
 * <p>
 * In practice this is used to load code in entries inside of zip files.
 */
@GwtIncompatible("java.net.URL")
static class AtUrl extends SourceFile {
  private static final long serialVersionUID = 1L;
  private final URL url;
  // This is stored as a String, but passed in and out as a Charset so that
  // we can serialize the class.
  // Default input file format for the compiler has always been UTF_8.
  private String inputCharset = UTF_8.name();

  AtUrl(URL url, String originalPath, Charset c) {
    super(originalPath);
    this.url = url;
    super.setOriginalPath(originalPath);
    if (c != null) {
      this.setCharset(c);
    }
  }

  @Override
  public synchronized String getCode() throws IOException {
    String cachedCode = super.getCode();
    if (cachedCode == null) {
      URLConnection urlConnection = url.openConnection();
      // Perform the read through the URL connection while making sure that it
      // does not internally cache, because its default internal caching would
      // defeat our own cache management.
      urlConnection.setUseCaches(false);
      // try-with-resources guarantees the stream is closed even if reading
      // throws; the stream must be closed or else the cache won't be cleared.
      // (Previously an exception in toString() leaked the open stream.)
      try (InputStream inputStream = urlConnection.getInputStream()) {
        cachedCode = CharStreams.toString(new InputStreamReader(inputStream, this.getCharset()));
      }
      super.setCode(cachedCode, Objects.equals(this.getCharset(), StandardCharsets.UTF_8));
      // Byte Order Mark can be removed by setCode; re-read the cached value.
      cachedCode = super.getCode();
    }
    return cachedCode;
  }

  /**
   * Gets a char source for the code at this URL.
   */
  @Override
  public CharSource getCodeCharSource() {
    if (hasSourceInMemory()) {
      return super.getCodeCharSource();
    } else {
      // If we haven't pulled the code into memory yet, don't. Decode with the
      // configured charset so this lazy path matches getCode(); previously it
      // was hard-coded to UTF-8 and ignored setCharset().
      return Resources.asCharSource(url, this.getCharset());
    }
  }

  /**
   * Gets a reader for the code at this URL.
   */
  @Override
  public Reader getCodeReader() throws IOException {
    if (hasSourceInMemory()) {
      return super.getCodeReader();
    } else {
      // If we haven't pulled the code into memory yet, don't.
      return getCodeCharSource().openStream();
    }
  }

  // Flush the cached code after the compile; we can read it from the URL
  // if we need it again.
  @Override
  public void clearCachedSource() {
    super.setCode(null);
  }

  /**
   * Store the Charset specification as the string version of the name,
   * rather than the Charset itself. This allows us to serialize the
   * SourceFile class.
   * @param c charset to use when reading the input.
   */
  public void setCharset(Charset c) {
    inputCharset = c.name();
  }

  /**
   * Get the Charset specifying how we're supposed to read the URL
   * into UTF-16. This is stored as a string to allow SourceFile to be
   * serialized.
   * @return Charset object representing charset to use.
   */
  public Charset getCharset() {
    return Charset.forName(inputCharset);
  }
}
}
| |
/*L
* Copyright Georgetown University, Washington University.
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/cab2b/LICENSE.txt for details.
*/
/**
* <p>Title: DataRow Class>
* <p>Description: Class which represents a row in the data list or the attribute names in the
* data list.</p>
* Copyright: Copyright (c) year
* Company: Washington University, School of Medicine, St. Louis.
* @author Gautam Shetty
* @version 1.00
*/
package edu.wustl.cab2b.common.datalist;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import java.util.Vector;
import edu.common.dynamicextensions.domaininterface.EntityInterface;
import edu.wustl.cab2b.common.queryengine.result.IRecord;
import edu.wustl.cab2b.common.util.Utility;
import edu.wustl.common.querysuite.metadata.associations.IAssociation;
import edu.wustl.common.tree.TreeNodeImpl;
/**
* Class which represents a row in the data list or the attribute names in the
* data list.
* @author gautam_shetty
*/
public class DataRow extends TreeNodeImpl implements IDataRow, Serializable {
/**
* Serialization id.
*/
private static final long serialVersionUID = 1L;
/**
* Name of the class to which this data
* belongs.
*/
private String className;
/**
* parent of this node.
*/
private IDataRow parent;
/**
* Children data list rows.
*/
private List<IDataRow> children = new ArrayList<IDataRow>();
/**
* Dynamic-extensions entity that describes this row's attributes.
*/
private EntityInterface entityInterface = null;
/**
* Association linking this row to its parent row, if any.
*/
private IAssociation parentAssociation = null;
/**
* Is true if the dataRow contains data else it is false i.e. if
* the dataRow contains attribute names.
*/
boolean isData = true;
/**
* Backing query-result record; null for title (attribute-name) rows.
*/
private IRecord record;
// No-arg construction is not meaningful for a row; kept private.
private DataRow() {
}
/**
* @param record backing record (may be null for title rows)
* @param entity entity describing the record
* @param displayName label shown for this row
*/
private DataRow(IRecord record, EntityInterface entity, String displayName) {
this.record = record;
this.entityInterface = entity;
this.className = displayName;
}
// Public constructor: derives the display name from the entity when present.
public DataRow(IRecord record, EntityInterface entity) {
this(record, entity, "");
if (entity != null) {
this.className = Utility.getDisplayName(entity);
}
}
/** Returns the backing record's id, or null for rows without a record. */
public String getId() {
  return (this.record == null) ? null : this.record.getRecordId().getId();
}
/**
* @return Returns the attributes.
*/
public EntityInterface getEntity() {
return this.entityInterface;
}
/**
* Legacy raw-Vector accessor required by the tree API; returns a fresh copy.
* @return Returns the childNodes.
*/
public Vector getChildNodes() {
return new Vector(children);
}
/**
* @return Returns the parent.
*/
public IDataRow getParent() {
return parent;
}
/**
* @param parent The parent to set.
*/
public void setParent(IDataRow parent) {
this.parent = parent;
}
/**
* Note: returns the live list, not a copy; mutations affect this row.
* @return Returns the children.
*/
public List<IDataRow> getChildren() {
return children;
}
/**
* @param children The children to set.
*/
public void setChildren(List<IDataRow> children) {
this.children = children;
}
/**
* Returns the class name.
* @return Returns the className.
*/
public String getClassName() {
return className;
}
/**
* Returns the data service URL from the backing record, or null for rows
* without a record.
* @see edu.wustl.cab2b.common.datalist.IDataRow#getURL()
*/
public String getURL() {
String url = null;
if (this.record != null) {
url = this.record.getRecordId().getUrl();
}
return url;
}
/**
* Returns true if the row contains data else returns false i.e. if the
* row contains attribute names.
* @return Returns the isData.
*/
public boolean isData() {
return isData;
}
/**
* Set true if the row contains data else set it as false.
* @param isData The isData to set.(true/false)
*/
public void setData(boolean isData) {
this.isData = isData;
}
/**
 * Two rows are equal when they have the same class name and the same record
 * identity (id plus data-service URL, compared case-insensitively), or when
 * neither row has an id (title rows of the same entity/service).
 * <p>
 * Null-safe: rows without a backing record report a null id AND a null URL,
 * so the previous implementation threw a NullPointerException from
 * {@code getURL().compareToIgnoreCase(...)} whenever two record-less rows of
 * the same class were compared.
 */
public boolean equals(Object obj) {
  if (!(obj instanceof IDataRow)) {
    return false;
  }
  IDataRow dataRow = (IDataRow) obj;
  String myId = getId();
  String otherId = dataRow.getId();
  // Rows are only comparable when both have an id, or both lack one.
  if ((myId == null) != (otherId == null)) {
    return false;
  }
  if (myId != null && !myId.equals(otherId)) {
    return false;
  }
  if (!this.getClassName().equals(dataRow.getClassName())) {
    return false;
  }
  return urlsMatch(this.getURL(), dataRow.getURL());
}

/** Case-insensitive, null-safe comparison of data service URLs. */
private static boolean urlsMatch(String url1, String url2) {
  if (url1 == null || url2 == null) {
    return url1 == url2;
  }
  return url1.compareToIgnoreCase(url2) == 0;
}
/* (non-Javadoc)
* @see java.lang.Object#hashCode()
* Combines the id and class-name hashes; stays consistent with equals,
* which compares (at least) those two properties.
*/
public int hashCode() {
int i = 0;
if (getId() != null) {
i += getId().hashCode();
}
if (getClassName() != null) {
i += getClassName().hashCode();
}
return i;
}
/**
 * Data rows display as "&lt;className&gt;_&lt;id&gt;"; record-less rows show
 * the class name with a non-zero child count appended, e.g. "Gene (3)".
 */
@Override
public String toString() {
  if (getId() != null) {
    return this.className + "_" + getId();
  }
  int childCount = getChildren().size();
  if (childCount != 0) {
    return this.className + " (" + childCount + ")";
  }
  return this.className;
}
/** @return the association linking this row to its parent row, if any. */
public IAssociation getAssociation() {
return parentAssociation;
}
public void setAssociation(IAssociation association) {
this.parentAssociation = association;
}
/** @return the backing query-result record; null for title rows. */
public IRecord getRecord() {
return record;
}
/**
* Builds a record-less copy of this row to act as a title (header) node.
* @see edu.wustl.cab2b.common.datalist.IDataRow#getTitleNode()
*/
public IDataRow getTitleNode() {
IDataRow titleDataRow = new DataRow(null, this.entityInterface, this.className);
titleDataRow.setData(false);
return titleDataRow;
}
/**
* Shallow copy: record, entity and association references are shared;
* parent/children links are NOT copied.
* @see edu.wustl.cab2b.common.datalist.IDataRow#getCopy()
*/
public IDataRow getCopy() {
IDataRow copiedDataRow = new DataRow(this.record, this.entityInterface, this.className);
copiedDataRow.setData(this.isData);
copiedDataRow.setAssociation(this.parentAssociation);
return copiedDataRow;
}
/**
* Adds a child row and wires its parent pointer back to this row.
* @see edu.wustl.cab2b.common.datalist.IDataRow#addChild(edu.wustl.cab2b.common.datalist.IDataRow)
*/
public void addChild(IDataRow childRow) {
this.children.add(childRow);
childRow.setParent(this);
}
}
| |
package signalproc.input;
import org.trianacode.taskgraph.NodeException;
import org.trianacode.taskgraph.Task;
import org.trianacode.taskgraph.Unit;
import org.trianacode.taskgraph.event.*;
import triana.types.VectorType;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
/**
* Imports raw data from a binary file
*
* @author Ian Wang
* @version $Revision $
*/
public class BinaryImporter extends Unit implements TaskListener {
// Data-type choices presented in the GUI.
public static final String DOUBLE = "Double (8bytes)";
public static final String FLOAT = "Float (4bytes)";
public static final String LONG = "Long (8bytes)";
public static final String INT = "Int (4bytes)";
public static final String SHORT = "Short (2bytes)";
public static final String BYTE = "Byte (1byte)";
// Bytes-per-column choices.
public static final String ONE_BYTE_PER_COLUMN = "One byte per column";
public static final String SAME_AS_FOR_DATA_TYPE = "Same as for data type";
// Extraction-direction choices.
public static final String COLUMNS = "Columns";
public static final String ROWS = "Rows";
// Rewind-policy choices.
public static final String NEVER_REWIND = "Never";
public static final String AUTOMATIC = "Automatic";
public static final String EVERY_RUN = "Every run";
// parameter data type definitions (populated from the task's parameters)
private String datatype;
private int columns;
private int rows;
private String columnbytes;
private String rowschema;
private String columnschema;
private boolean reversebytes;
private String filename;
private int offset;
private String extract;
private boolean multi;
private String rewind;
private boolean iteroffset;
/**
* the binary importer utility class
*/
private ImportBinary imp;
/**
* a flag indicating one data set has been successfully imported from the input stream
*/
private boolean onesuccess = false;
/*
* Called whenever there is data for the unit to process. Re-opens the input
* file when required by the rewind policy, optionally re-applies the header
* offset each iteration, then imports one data set.
*/
public void process() throws Exception {
if ((imp == null) || rewind.equals(EVERY_RUN)) {
openImport();
} else if (iteroffset) {
// Skip the header offset again on every iteration when requested.
imp.offset(offset);
}
initImport();
importData();
}
/**
* Open the import binary utility and skips the header
*/
private void openImport() throws IOException, FileNotFoundException {
if (imp != null) {
imp.close();
}
// NOTE(review): assumes ImportBinary takes ownership of the stream and
// closes it in imp.close() -- confirm; otherwise the FileInputStream could
// leak if the constructor or offset() throws.
imp = new ImportBinary(new FileInputStream(filename));
imp.offset(offset);
onesuccess = false;
}
/**
* Initialise the parameters on the import binary utility
*/
private void initImport() throws IOException {
imp.setColumns(columns);
imp.setRows(rows);
imp.setOneBytePerColumn(columnbytes.equals(ONE_BYTE_PER_COLUMN));
imp.setReverseByteOrder(reversebytes);
// Map the GUI data-type label onto the ImportBinary type constant.
if (datatype.equals(DOUBLE)) {
imp.setDataType(ImportBinary.DOUBLE);
} else if (datatype.equals(FLOAT)) {
imp.setDataType(ImportBinary.FLOAT);
} else if (datatype.equals(LONG)) {
imp.setDataType(ImportBinary.LONG);
} else if (datatype.equals(INT)) {
imp.setDataType(ImportBinary.INT);
} else if (datatype.equals(SHORT)) {
imp.setDataType(ImportBinary.SHORT);
} else if (datatype.equals(BYTE)) {
imp.setDataType(ImportBinary.BYTE);
}
// Note: an unrecognised label leaves the previously set data type in place.
imp.nextDataSet();
}
/**
 * Imports one data set from the input stream using the import binary
 * utility, dispatching on whether rows or columns are being extracted.
 */
private void importData() throws IOException {
  DataSchema columnSchema = DataSchema.getDataSchema(columnschema);
  DataSchema rowSchema = DataSchema.getDataSchema(rowschema);
  boolean extractRows = extract.equals(ROWS);
  if (!extractRows) {
    importColumns(columnSchema, rowSchema);
  } else {
    importRows(columnSchema, rowSchema);
  }
}
/**
* Imports columns from the input stream
*/
private void importColumns(DataSchema colscheme, DataSchema rowscheme) throws IOException {
VectorType[] dataarray = imp.readColumns(colscheme, rowscheme);
// A null read means the stream is exhausted; with automatic rewind we
// reopen the file once and retry from the start.
if ((dataarray == null) && (rewind.equals(AUTOMATIC))) {
openImport();
initImport();
dataarray = imp.readColumns(colscheme, rowscheme);
}
if (dataarray != null) {
for (int count = 0; count < dataarray.length; count++) {
if (multi) {
// One column per output node.
outputAtNode(count, dataarray[count], true);
} else {
// All columns through the default output.
output(dataarray[count]);
}
}
} else if (!rewind.equals(NEVER_REWIND)) {
throw (new RuntimeException("Data set exceeds input stream length"));
}
}
/**
* Import rows from the input stream
*/
private void importRows(DataSchema colscheme, DataSchema rowscheme) throws IOException {
VectorType data;
int count = 0;
// Expected number of rows: the explicit schema entries plus any open-ended
// ("15+") tail up to the configured row count.
int rowcount = rowscheme.getSchema().length;
if ((rowscheme.getCutOff() != -1) && (rows > 0)) {
rowcount += rows - rowscheme.getCutOff() + 1;
}
do {
data = imp.readRow(colscheme, rowscheme);
// Stream exhausted before the expected rows were read: with automatic
// rewind, reopen once and restart the row count from zero.
if ((((rows > 0) && (count < rowcount)) || (count == 0)) && (data == null) && (rewind.equals(AUTOMATIC))) {
openImport();
initImport();
count = 0;
data = imp.readRow(colscheme, rowscheme);
}
if (data != null) {
if (multi) {
// One row per output node; overflow rows go to the last node.
outputAtNode(Math.min(count, getTask().getDataOutputNodeCount()), data, true);
} else {
output(data);
}
count++;
}
} while (data != null);
if ((!onesuccess) && (count < rowcount)) {
throw (new RuntimeException("Data set exceeds input stream size"));
} else {
// At least one full data set has now been imported from this stream.
onesuccess = true;
}
}
/**
 * Called when the unit is created. Initialises node counts, the parameter
 * update policy, default task parameter values and the GUI builder description.
 */
public void init() {
    super.init();
    // Node configuration: no input nodes, one output node by default, unbounded maximum.
    setDefaultInputNodes(0);
    setMinimumInputNodes(0);
    setMaximumInputNodes(0);
    setDefaultOutputNodes(1);
    setMinimumOutputNodes(0);
    setMaximumOutputNodes(Integer.MAX_VALUE);
    // Initialise parameter update policy
    setParameterUpdatePolicy(Task.PROCESS_UPDATE);
    setPopUpDescription("Imports raw data from a binary file");
    setHelpFileLocation("BinaryImporter.html");
    // Seed every task parameter that has not been initialised with its default value.
    Task task = getTask();
    String[][] defaults = {
            {"datatype", DOUBLE},
            {"columns", "1"},
            {"rows", "1"},
            {"columnbytes", SAME_AS_FOR_DATA_TYPE},
            {"rowschema", ""},
            {"columnschema", ""},
            {"reversebytes", "false"},
            {"filename", ""},
            {"offset", "0"},
            {"extract", COLUMNS},
            {"multi", "false"},
            {"rewind", EVERY_RUN},
            {"iteroffset", "false"}};
    for (int i = 0; i < defaults.length; i++) {
        if (!task.isParameterName(defaults[i][0])) {
            task.setParameter(defaults[i][0], defaults[i][1]);
        }
    }
    // GUI builder description: one line per editable parameter.
    StringBuilder guilines = new StringBuilder();
    guilines.append("Filename $title filename File null *.*\n");
    guilines.append("Data type $title datatype Choice [" + DOUBLE + "] [" + FLOAT + "] [" + LONG + "] [" + INT + "] ["
            + SHORT + "] [" + BYTE + "]\n");
    guilines.append("Bytes per column $title columnbytes Choice [" + ONE_BYTE_PER_COLUMN + "] [" + SAME_AS_FOR_DATA_TYPE
            + "]\n");
    guilines.append("Extract $title extract Choice [" + COLUMNS + "] [" + ROWS + "]\n");
    guilines.append("Header offset (bytes) $title offset TextField 0\n");
    guilines.append("Number of columns $title columns TextField 1\n");
    guilines.append("Number of rows $title rows TextField 1\n");
    guilines.append("Extract columns (e.g. 1,3-12,15+) $title columnschema TextField \n");
    guilines.append("Extract rows (e.g. 1,3-12,15+) $title rowschema TextField \n");
    guilines.append("Reverse byte order $title reversebytes Checkbox false\n");
    guilines.append("Output on multiple nodes $title multi Checkbox false\n");
    guilines.append("Header offset every iteration $title iteroffset Checkbox false\n");
    guilines.append("Rewind input stream $title rewind Choice [" + NEVER_REWIND + "] [" + AUTOMATIC + "] [" + EVERY_RUN
            + "]\n");
    setGUIBuilderV2Info(guilines.toString());
    task.addTaskListener(this);
}
/**
 * Called when the unit is reset. Copies the current task parameter values into
 * the unit's local fields.
 */
public void reset() {
    // Set unit parameters to the values specified by the task definition.
    Task task = getTask();
    datatype = (String) task.getParameter("datatype");
    columns = Integer.parseInt((String) task.getParameter("columns"));
    // Mirror parameterUpdate(): a blank "rows" value means "no row limit" (0)
    // rather than a NumberFormatException.
    String rowsvalue = (String) task.getParameter("rows");
    if ((rowsvalue == null) || rowsvalue.trim().equals("")) {
        rows = 0;
    } else {
        rows = Integer.parseInt(rowsvalue);
    }
    columnbytes = (String) task.getParameter("columnbytes");
    rowschema = (String) task.getParameter("rowschema");
    columnschema = (String) task.getParameter("columnschema");
    reversebytes = Boolean.parseBoolean((String) task.getParameter("reversebytes"));
    filename = (String) task.getParameter("filename");
    offset = Integer.parseInt((String) task.getParameter("offset"));
    extract = (String) task.getParameter("extract");
    multi = Boolean.parseBoolean((String) task.getParameter("multi"));
    rewind = (String) task.getParameter("rewind");
    iteroffset = Boolean.parseBoolean((String) task.getParameter("iteroffset"));
}
/**
 * Called when the unit is disposed of. No clean-up is currently performed here.
 */
public void dispose() {
    // Insert code to clean-up BinaryImporter (e.g. close open files)
}
/**
 * Called when a parameter is updated (e.g. by the GUI). Mirrors the new value
 * into the matching local field, then normalises the schema strings.
 *
 * @param paramname the name of the parameter that changed
 * @param value the new value (a String for every parameter this unit owns)
 */
public void parameterUpdate(String paramname, Object value) {
    // Each parameter name matches at most one branch, so an else-if chain is equivalent.
    if (paramname.equals("datatype")) {
        datatype = (String) value;
    } else if (paramname.equals("columns")) {
        columns = Integer.parseInt((String) value);
    } else if (paramname.equals("rows")) {
        // A blank row count means "no row limit".
        String text = (String) value;
        rows = text.trim().equals("") ? 0 : Integer.parseInt(text);
    } else if (paramname.equals("columnbytes")) {
        columnbytes = (String) value;
    } else if (paramname.equals("rowschema")) {
        rowschema = (String) value;
    } else if (paramname.equals("columnschema")) {
        columnschema = (String) value;
    } else if (paramname.equals("reversebytes")) {
        reversebytes = Boolean.parseBoolean((String) value);
    } else if (paramname.equals("filename")) {
        filename = (String) value;
    } else if (paramname.equals("offset")) {
        offset = Integer.parseInt((String) value);
    } else if (paramname.equals("extract")) {
        extract = (String) value;
    } else if (paramname.equals("multi")) {
        multi = Boolean.parseBoolean((String) value);
    } else if (paramname.equals("rewind")) {
        rewind = (String) value;
    } else if (paramname.equals("iteroffset")) {
        iteroffset = Boolean.parseBoolean((String) value);
    }
    // A '+' in a schema means "to the end": drop anything typed after the first '+'.
    if (columnschema != null && columnschema.indexOf('+') > -1 && !columnschema.endsWith("+")) {
        setParameter("columnschema", columnschema.substring(0, columnschema.indexOf('+') + 1));
    } else if (rowschema != null && rowschema.indexOf('+') > -1 && !rowschema.endsWith("+")) {
        setParameter("rowschema", rowschema.substring(0, rowschema.indexOf('+') + 1));
    }
}
/**
 * @return the input types for BinaryImporter; empty because this unit accepts
 *         no input nodes (it reads from a file instead)
 */
public String[] getInputTypes() {
    return new String[0];
}
/**
 * @return the output types for BinaryImporter: a single VectorType
 */
public String[] getOutputTypes() {
    String[] types = {"VectorType"};
    return types;
}
/**
 * Called when the value of a parameter is changed, including when a parameter
 * is removed. When multi-node output is enabled, keeps the number of data
 * output nodes in step with the number of columns/rows that will be extracted;
 * when it is disabled, collapses back to a single output node.
 */
public void parameterUpdated(ParameterUpdateEvent event) {
    String paramname = event.getParameterName();
    Task task = event.getTask();
    try {
        // Only these parameters affect how many output nodes are required.
        boolean reextract = paramname.equals("columnschema") || paramname.equals("rowschema") ||
                paramname.equals("columns") || paramname.equals("rows") ||
                paramname.equals("extract") || paramname.equals("multi");
        if (reextract) {
            if (Boolean.parseBoolean((String) task.getParameter("multi"))) {
                DataSchema schema;
                int len;
                int nodecount;
                if (task.getParameter("extract").equals(COLUMNS)) {
                    schema = DataSchema.getDataSchema((String) getTask().getParameter("columnschema"));
                    len = Integer.parseInt((String) getTask().getParameter("columns"));
                } else {
                    schema = DataSchema.getDataSchema((String) getTask().getParameter("rowschema"));
                    len = Integer.parseInt((String) getTask().getParameter("rows"));
                }
                // An open-ended schema ("n+", cutoff > -1) needs a node for every
                // remaining column/row; otherwise one node per explicit schema entry.
                if (schema.getCutOff() > -1) {
                    nodecount = schema.getSchema().length + len - schema.getCutOff() + 1;
                } else {
                    nodecount = schema.getSchema().length;
                }
                // Grow or shrink the output node count to match (always keep at least one).
                while (getTask().getDataOutputNodeCount() < nodecount) {
                    getTask().addDataOutputNode();
                }
                while (getTask().getDataOutputNodeCount() > Math.max(nodecount, 1)) {
                    getTask().removeDataOutputNode(getTask().getDataOutputNode(nodecount));
                }
            } else if (paramname.equals("multi")) {
                // Multi-node output switched off: collapse back to a single output node.
                while (getTask().getDataOutputNodeCount() > 1) {
                    getTask().removeDataOutputNode(getTask().getDataOutputNode(1));
                }
            }
        }
    } catch (NumberFormatException ignored) {
        // The parameter text is mid-edit and not yet a valid number; the node
        // counts will be recalculated on the next valid update.
    } catch (NodeException except) {
        notifyError(except.getMessage());
    }
}
/**
 * Called when the core properties of a task change i.e. its name, whether it is
 * running continuously etc. BinaryImporter does not react to these changes.
 */
public void taskPropertyUpdate(TaskPropertyEvent event) {
}
/**
 * Called when a data input node is added. No action required: this unit
 * manages its output nodes itself in parameterUpdated().
 */
public void nodeAdded(TaskNodeEvent event) {
}
/**
 * Called before a data input node is removed. No action required.
 */
public void nodeRemoved(TaskNodeEvent event) {
}
/**
 * Called before the task is disposed. No action required.
 */
public void taskDisposed(TaskDisposedEvent event) {
}
}
| |
package org.wso2.carbon.apimgt.rest.api.admin.v1.dto;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonCreator;
import javax.validation.constraints.*;
import io.swagger.annotations.*;
import java.util.Objects;
import javax.xml.bind.annotation.*;
import org.wso2.carbon.apimgt.rest.api.util.annotations.Scope;
/**
 * DTO describing a single workflow request in the admin REST API.
 * Generated swagger-style model: fluent setters, JAXB/Jackson annotations and
 * value-based equals/hashCode over all fields.
 */
public class WorkflowInfoDTO {

    // Closed set of workflow request types exposed by the API.
    @XmlType(name="WorkflowTypeEnum")
    @XmlEnum(String.class)
    public enum WorkflowTypeEnum {
        @XmlEnumValue("APPLICATION_CREATION") APPLICATION_CREATION(String.valueOf("APPLICATION_CREATION")), @XmlEnumValue("SUBSCRIPTION_CREATION") SUBSCRIPTION_CREATION(String.valueOf("SUBSCRIPTION_CREATION")), @XmlEnumValue("USER_SIGNUP") USER_SIGNUP(String.valueOf("USER_SIGNUP")), @XmlEnumValue("APPLICATION_REGISTRATION_PRODUCTION") APPLICATION_REGISTRATION_PRODUCTION(String.valueOf("APPLICATION_REGISTRATION_PRODUCTION")), @XmlEnumValue("APPLICATION_REGISTRATION_SANDBOX") APPLICATION_REGISTRATION_SANDBOX(String.valueOf("APPLICATION_REGISTRATION_SANDBOX")), @XmlEnumValue("APPLICATION_DELETION") APPLICATION_DELETION(String.valueOf("APPLICATION_DELETION")), @XmlEnumValue("API_STATE") API_STATE(String.valueOf("API_STATE")), @XmlEnumValue("SUBSCRIPTION_DELETION") SUBSCRIPTION_DELETION(String.valueOf("SUBSCRIPTION_DELETION"));

        // The wire value serialised for this constant.
        private String value;

        WorkflowTypeEnum (String v) {
            value = v;
        }

        public String value() {
            return value;
        }

        @Override
        public String toString() {
            return String.valueOf(value);
        }

        // Returns null when no constant matches; callers must handle null.
        public static WorkflowTypeEnum fromValue(String v) {
            for (WorkflowTypeEnum b : WorkflowTypeEnum.values()) {
                if (String.valueOf(b.value).equals(v)) {
                    return b;
                }
            }
            return null;
        }
    }

    private WorkflowTypeEnum workflowType = null;

    // Status of a workflow request: either still CREATED or already APPROVED.
    @XmlType(name="WorkflowStatusEnum")
    @XmlEnum(String.class)
    public enum WorkflowStatusEnum {
        @XmlEnumValue("APPROVED") APPROVED(String.valueOf("APPROVED")), @XmlEnumValue("CREATED") CREATED(String.valueOf("CREATED"));

        // The wire value serialised for this constant.
        private String value;

        WorkflowStatusEnum (String v) {
            value = v;
        }

        public String value() {
            return value;
        }

        @Override
        public String toString() {
            return String.valueOf(value);
        }

        // Returns null when no constant matches; callers must handle null.
        public static WorkflowStatusEnum fromValue(String v) {
            for (WorkflowStatusEnum b : WorkflowStatusEnum.values()) {
                if (String.valueOf(b.value).equals(v)) {
                    return b;
                }
            }
            return null;
        }
    }

    private WorkflowStatusEnum workflowStatus = null;
    private String createdTime = null;
    private String updatedTime = null;
    private String referenceId = null;
    private Object properties = null;
    private String description = null;

    /**
     * Type of the Workflow Request. It shows which type of request is it.
     **/
    public WorkflowInfoDTO workflowType(WorkflowTypeEnum workflowType) {
        this.workflowType = workflowType;
        return this;
    }

    @ApiModelProperty(example = "APPLICATION_CREATION", value = "Type of the Workflow Request. It shows which type of request is it. ")
    @JsonProperty("workflowType")
    public WorkflowTypeEnum getWorkflowType() {
        return workflowType;
    }
    public void setWorkflowType(WorkflowTypeEnum workflowType) {
        this.workflowType = workflowType;
    }

    /**
     * Show the Status of the the workflow request whether it is approved or created.
     **/
    public WorkflowInfoDTO workflowStatus(WorkflowStatusEnum workflowStatus) {
        this.workflowStatus = workflowStatus;
        return this;
    }

    @ApiModelProperty(example = "APPROVED", value = "Show the Status of the the workflow request whether it is approved or created. ")
    @JsonProperty("workflowStatus")
    public WorkflowStatusEnum getWorkflowStatus() {
        return workflowStatus;
    }
    public void setWorkflowStatus(WorkflowStatusEnum workflowStatus) {
        this.workflowStatus = workflowStatus;
    }

    /**
     * Time of the the workflow request created.
     **/
    public WorkflowInfoDTO createdTime(String createdTime) {
        this.createdTime = createdTime;
        return this;
    }

    @ApiModelProperty(example = "2020-02-10 10:10:19.704", value = "Time of the the workflow request created. ")
    @JsonProperty("createdTime")
    public String getCreatedTime() {
        return createdTime;
    }
    public void setCreatedTime(String createdTime) {
        this.createdTime = createdTime;
    }

    /**
     * Time of the the workflow request updated.
     **/
    public WorkflowInfoDTO updatedTime(String updatedTime) {
        this.updatedTime = updatedTime;
        return this;
    }

    @ApiModelProperty(example = "2020-02-10 10:10:19.704", value = "Time of the the workflow request updated. ")
    @JsonProperty("updatedTime")
    public String getUpdatedTime() {
        return updatedTime;
    }
    public void setUpdatedTime(String updatedTime) {
        this.updatedTime = updatedTime;
    }

    /**
     * Workflow external reference is used to identify the workflow requests uniquely.
     **/
    public WorkflowInfoDTO referenceId(String referenceId) {
        this.referenceId = referenceId;
        return this;
    }

    @ApiModelProperty(example = "5871244b-d6f3-466e-8995-8accd1e64303", value = "Workflow external reference is used to identify the workflow requests uniquely. ")
    @JsonProperty("referenceId")
    public String getReferenceId() {
        return referenceId;
    }
    public void setReferenceId(String referenceId) {
        this.referenceId = referenceId;
    }

    /**
     * Additional, workflow-type-specific properties (untyped in the generated model).
     **/
    public WorkflowInfoDTO properties(Object properties) {
        this.properties = properties;
        return this;
    }

    @ApiModelProperty(value = "")
    @JsonProperty("properties")
    public Object getProperties() {
        return properties;
    }
    public void setProperties(Object properties) {
        this.properties = properties;
    }

    /**
     * description is a message with basic details about the workflow request.
     **/
    public WorkflowInfoDTO description(String description) {
        this.description = description;
        return this;
    }

    @ApiModelProperty(example = "Approve application [APP1] creation request from application creator - admin with throttling tier - 10MinPer", value = "description is a message with basic details about the workflow request. ")
    @JsonProperty("description")
    public String getDescription() {
        return description;
    }
    public void setDescription(String description) {
        this.description = description;
    }

    // Value equality over all seven fields.
    @Override
    public boolean equals(java.lang.Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        WorkflowInfoDTO workflowInfo = (WorkflowInfoDTO) o;
        return Objects.equals(workflowType, workflowInfo.workflowType) &&
                Objects.equals(workflowStatus, workflowInfo.workflowStatus) &&
                Objects.equals(createdTime, workflowInfo.createdTime) &&
                Objects.equals(updatedTime, workflowInfo.updatedTime) &&
                Objects.equals(referenceId, workflowInfo.referenceId) &&
                Objects.equals(properties, workflowInfo.properties) &&
                Objects.equals(description, workflowInfo.description);
    }

    @Override
    public int hashCode() {
        return Objects.hash(workflowType, workflowStatus, createdTime, updatedTime, referenceId, properties, description);
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("class WorkflowInfoDTO {\n");
        sb.append("    workflowType: ").append(toIndentedString(workflowType)).append("\n");
        sb.append("    workflowStatus: ").append(toIndentedString(workflowStatus)).append("\n");
        sb.append("    createdTime: ").append(toIndentedString(createdTime)).append("\n");
        sb.append("    updatedTime: ").append(toIndentedString(updatedTime)).append("\n");
        sb.append("    referenceId: ").append(toIndentedString(referenceId)).append("\n");
        sb.append("    properties: ").append(toIndentedString(properties)).append("\n");
        sb.append("    description: ").append(toIndentedString(description)).append("\n");
        sb.append("}");
        return sb.toString();
    }

    /**
     * Convert the given object to string with each line indented by 4 spaces
     * (except the first line).
     */
    private String toIndentedString(java.lang.Object o) {
        if (o == null) {
            return "null";
        }
        return o.toString().replace("\n", "\n    ");
    }
}
| |
/*
* Copyright 2010-2012 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.glacier.model;
import com.amazonaws.AmazonWebServiceRequest;
/**
* Container for the parameters to the {@link com.amazonaws.services.glacier.AmazonGlacier#setVaultNotifications(SetVaultNotificationsRequest) SetVaultNotifications operation}.
* <p>
* This operation configures notifications that will be sent when specific events happen to a vault. By default, you don't get any notifications.
* </p>
* <p>
* To configure vault notifications, send a PUT request to the <code>notification-configuration</code> subresource of the vault. The request should
* include a JSON document that provides an Amazon SNS topic and specific events for which you want Amazon Glacier to send notifications to the topic.
* </p>
* <p>
* Amazon SNS topics must grant permission to the vault to be allowed to publish notifications to the topic. You can configure a vault to publish a
* notification for the following vault events:
* </p>
*
* <ul>
* <li> <b>ArchiveRetrievalCompleted</b> This event occurs when a job that was initiated for an archive retrieval is completed (InitiateJob). The status
* of the completed job can be "Succeeded" or "Failed". The notification sent to the SNS topic is the same output as returned from DescribeJob. </li>
* <li> <b>InventoryRetrievalCompleted</b> This event occurs when a job that was initiated for an inventory retrieval is completed (InitiateJob). The
* status of the completed job can be "Succeeded" or "Failed". The notification sent to the SNS topic is the same output as returned from DescribeJob.
* </li>
*
* </ul>
* <p>
* An AWS account has full permission to perform all operations (actions). However, AWS Identity and Access Management (IAM) users don't have any
* permissions by default. You must grant them explicit permission to perform specific actions. For more information, see <a
* href="http://docs.amazonwebservices.com/amazonglacier/latest/dev/using-iam-with-amazon-glacier.html"> Access Control Using AWS Identity and Access
* Management (IAM) </a> .
* </p>
* <p>
* For conceptual information and underlying REST API, go to <a
* href="http://docs.amazonwebservices.com/amazonglacier/latest/dev/configuring-notifications.html"> Configuring Vault Notifications in Amazon Glacier
* </a> and <a href="http://docs.amazonwebservices.com/amazonglacier/latest/dev/api-vault-notifications-put.html"> Set Vault Notification Configuration
* </a> in the <i>Amazon Glacier Developer Guide</i> .
*
* </p>
*
* @see com.amazonaws.services.glacier.AmazonGlacier#setVaultNotifications(SetVaultNotificationsRequest)
*/
public class SetVaultNotificationsRequest extends AmazonWebServiceRequest {

    /**
     * The <code>AccountId</code> is the AWS Account ID. You can specify either
     * the AWS Account ID or optionally a '-', in which case Amazon Glacier
     * uses the AWS Account ID associated with the credentials used to sign
     * the request. If you specify your Account ID, do not include hyphens in it.
     */
    private String accountId;

    /**
     * The name of the vault.
     */
    private String vaultName;

    /**
     * Provides options for specifying notification configuration.
     */
    private VaultNotificationConfig vaultNotificationConfig;

    /**
     * Default constructor for a new SetVaultNotificationsRequest object.
     * Callers should use the setter or fluent setter (with...) methods to
     * initialize this object after creating it.
     */
    public SetVaultNotificationsRequest() {}

    /**
     * Constructs a new SetVaultNotificationsRequest object with the given
     * vault name and notification configuration.
     *
     * @param vaultName The name of the vault.
     * @param vaultNotificationConfig Provides options for specifying
     *            notification configuration.
     */
    public SetVaultNotificationsRequest(String vaultName, VaultNotificationConfig vaultNotificationConfig) {
        this.vaultName = vaultName;
        this.vaultNotificationConfig = vaultNotificationConfig;
    }

    /**
     * Constructs a new SetVaultNotificationsRequest object with an explicit
     * account ID in addition to the vault name and configuration.
     *
     * @param accountId The AWS Account ID, or '-' to use the account
     *            associated with the signing credentials. Do not include
     *            hyphens in an explicit Account ID.
     * @param vaultName The name of the vault.
     * @param vaultNotificationConfig Provides options for specifying
     *            notification configuration.
     */
    public SetVaultNotificationsRequest(String accountId, String vaultName, VaultNotificationConfig vaultNotificationConfig) {
        this.accountId = accountId;
        this.vaultName = vaultName;
        this.vaultNotificationConfig = vaultNotificationConfig;
    }

    /**
     * @return The AWS Account ID, or '-' meaning the account associated with
     *         the signing credentials.
     */
    public String getAccountId() {
        return accountId;
    }

    /**
     * @param accountId The AWS Account ID, or '-' to use the account
     *            associated with the signing credentials. Do not include
     *            hyphens in an explicit Account ID.
     */
    public void setAccountId(String accountId) {
        this.accountId = accountId;
    }

    /**
     * Fluent variant of {@link #setAccountId(String)}.
     *
     * @param accountId The AWS Account ID, or '-' to use the account
     *            associated with the signing credentials.
     * @return A reference to this updated object so that method calls can be
     *         chained together.
     */
    public SetVaultNotificationsRequest withAccountId(String accountId) {
        setAccountId(accountId);
        return this;
    }

    /**
     * @return The name of the vault.
     */
    public String getVaultName() {
        return vaultName;
    }

    /**
     * @param vaultName The name of the vault.
     */
    public void setVaultName(String vaultName) {
        this.vaultName = vaultName;
    }

    /**
     * Fluent variant of {@link #setVaultName(String)}.
     *
     * @param vaultName The name of the vault.
     * @return A reference to this updated object so that method calls can be
     *         chained together.
     */
    public SetVaultNotificationsRequest withVaultName(String vaultName) {
        setVaultName(vaultName);
        return this;
    }

    /**
     * @return Provides options for specifying notification configuration.
     */
    public VaultNotificationConfig getVaultNotificationConfig() {
        return vaultNotificationConfig;
    }

    /**
     * @param vaultNotificationConfig Provides options for specifying
     *            notification configuration.
     */
    public void setVaultNotificationConfig(VaultNotificationConfig vaultNotificationConfig) {
        this.vaultNotificationConfig = vaultNotificationConfig;
    }

    /**
     * Fluent variant of {@link #setVaultNotificationConfig(VaultNotificationConfig)}.
     *
     * @param vaultNotificationConfig Provides options for specifying
     *            notification configuration.
     * @return A reference to this updated object so that method calls can be
     *         chained together.
     */
    public SetVaultNotificationsRequest withVaultNotificationConfig(VaultNotificationConfig vaultNotificationConfig) {
        setVaultNotificationConfig(vaultNotificationConfig);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder buffer = new StringBuilder("{");
        if (accountId != null) {
            buffer.append("AccountId: ").append(accountId).append(", ");
        }
        if (vaultName != null) {
            buffer.append("VaultName: ").append(vaultName).append(", ");
        }
        if (vaultNotificationConfig != null) {
            buffer.append("VaultNotificationConfig: ").append(vaultNotificationConfig).append(", ");
        }
        return buffer.append("}").toString();
    }

    @Override
    public int hashCode() {
        // Standard prime-31 accumulation over the three members (null hashes to 0).
        Object[] members = {getAccountId(), getVaultName(), getVaultNotificationConfig()};
        int hashCode = 1;
        for (Object member : members) {
            hashCode = 31 * hashCode + (member == null ? 0 : member.hashCode());
        }
        return hashCode;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, covering the null check as well.
        if (!(obj instanceof SetVaultNotificationsRequest)) {
            return false;
        }
        SetVaultNotificationsRequest other = (SetVaultNotificationsRequest) obj;
        return memberEquals(getAccountId(), other.getAccountId())
                && memberEquals(getVaultName(), other.getVaultName())
                && memberEquals(getVaultNotificationConfig(), other.getVaultNotificationConfig());
    }

    /**
     * Null-safe member equality used by {@link #equals(Object)}.
     */
    private static boolean memberEquals(Object a, Object b) {
        return a == null ? b == null : a.equals(b);
    }
}
| |
package ikube.web.service;
import com.google.common.collect.Lists;
import ikube.IConstants;
import ikube.model.Api;
import ikube.model.ApiMethod;
import ikube.toolkit.OBJECT;
import ikube.toolkit.STRING;
import org.apache.commons.lang.StringUtils;
import org.reflections.Reflections;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
import javax.ws.rs.*;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Set;
import java.util.regex.Pattern;
import static ikube.toolkit.OBJECT.getObject;
import static ikube.toolkit.OBJECT.getPrimitive;
/**
* This rest web service exposes the web services that are annotated with the {@link ikube.web.service.Api}
* annotation. This annotation contains the types of parameters for the web service, the type of operation, GET for example,
* and what type of data it produces, including an example of that object/data.
*
* @author Michael couck
* @version 01.00
* @since 11-07-2014
*/
@Component
@Path(ApiDocs.API)
@Scope(ApiDocs.REQUEST)
@Consumes(MediaType.TEXT_PLAIN)
@Produces(MediaType.APPLICATION_JSON)
@ikube.web.service.Api(description = "This resource is the rest service that provides the description of the other rest resources")
public class ApiDocs extends Resource {
// Base path of this documentation resource.
public static final String API = "/api";
// Sub-path that returns the full collection of api descriptions.
public static final String APIS = "/apis";
// Matches the toString() of any JAX-RS HTTP-method annotation (e.g. "@javax.ws.rs.GET()").
private static final Pattern OPTIONS = Pattern.compile(".*(GET).*|.*(POST).*|.*(PUT).*|.*(OPTIONS).*|.*(HEAD).*|.*(DELETE).*");
/**
 * Returns every {@link ikube.web.service.Api}-annotated resource in this
 * package as a collection of {@link ikube.model.Api} descriptions, Jsonified,
 * providing living documentation of the exposed rest services.
 *
 * @return the Json representation of the collection of api descriptions
 */
@GET
@Path(APIS)
@Consumes(MediaType.TEXT_PLAIN)
@Produces(MediaType.APPLICATION_JSON)
@ikube.web.service.Api(
        description = "This method will return all the apis in the system as a collection, Jsonified",
        produces = ArrayList.class)
public Response apis() {
    // Scan this package for every resource carrying the api annotation.
    String packageName = this.getClass().getPackage().getName();
    Set<Class<?>> resources = new Reflections(packageName).getTypesAnnotatedWith(ikube.web.service.Api.class);
    ArrayList<Api> apis = Lists.newArrayList();
    for (final Class<?> resource : resources) {
        apis.add(getApi(resource));
    }
    return buildResponse(apis);
}
/**
 * Returns the description of a single resource, looked up by fully-qualified
 * class name.
 *
 * @param apiName the fully-qualified class name of the resource to describe
 * @return the Json representation of the {@link ikube.model.Api} for that resource
 */
@GET
@ikube.web.service.Api(
        description = "This method will return one api in the system, Jsonified, specified by the parameter name",
        produces = Api.class)
public Response api(@QueryParam(value = IConstants.NAME) final String apiName) {
    final Class<?> resource;
    try {
        resource = Class.forName(apiName);
    } catch (final ClassNotFoundException e) {
        // The caller supplied an unknown class name; surface it as unchecked.
        throw new RuntimeException(e);
    }
    return buildResponse(getApi(resource));
}
/**
 * Builds the {@link ikube.model.Api} description for one resource class:
 * class-level metadata plus one {@link ikube.model.ApiMethod} per annotated
 * declared method.
 *
 * @param resource the resource class to describe
 * @return the api description, or null when the class is not api-annotated
 */
Api getApi(final Class<?> resource) {
    if (!resource.isAnnotationPresent(ikube.web.service.Api.class)) {
        return null;
    }
    ikube.web.service.Api classAnnotation = resource.getAnnotation(ikube.web.service.Api.class);
    Path pathAnnotation = resource.getAnnotation(Path.class);
    Consumes consumesAnnotation = resource.getAnnotation(Consumes.class);
    Produces producesAnnotation = resource.getAnnotation(Produces.class);
    // Class-level defaults, inherited by methods that do not override them.
    String basePath;
    if (pathAnnotation != null) {
        basePath = pathAnnotation.value();
    } else {
        basePath = IConstants.SEP + IConstants.IKUBE + IConstants.SEP + IConstants.SERVICE;
    }
    String consumesType = consumesAnnotation == null ? "" : Arrays.toString(consumesAnnotation.value());
    String producesType = producesAnnotation == null ? "" : Arrays.toString(producesAnnotation.value());
    Api api = new Api();
    api.setApi(resource.getSimpleName());
    api.setDescription(classAnnotation.description());
    // Describe every declared method that carries the api annotation.
    for (final Method method : resource.getDeclaredMethods()) {
        if (!method.isAnnotationPresent(ikube.web.service.Api.class)) {
            continue;
        }
        ikube.web.service.Api methodAnnotation = method.getAnnotation(ikube.web.service.Api.class);
        ApiMethod apiMethod = new ApiMethod();
        apiMethod.setDescription(methodAnnotation.description());
        setMethodPath(apiMethod, method, basePath, methodAnnotation);
        setMethodType(apiMethod, method, methodAnnotation);
        setConsumesAndProducesTypes(apiMethod, method, consumesType, producesType);
        try {
            setConsumes(apiMethod, method, methodAnnotation);
            setProduces(apiMethod, method, methodAnnotation);
        } catch (final InstantiationException | IllegalAccessException e) {
            throw new RuntimeException(e);
        }
        api.getApiMethods().add(apiMethod);
    }
    return api;
}
/**
 * Sets the uri on the api method: an explicit uri in the annotation wins,
 * otherwise the class base path plus the method's own {@link Path} value.
 */
void setMethodPath(final ApiMethod apiMethod, final Method method, final String basePath, final ikube.web.service.Api apiMethodAnnotation) {
    if (StringUtils.isNotEmpty(apiMethodAnnotation.uri())) {
        apiMethod.setUri(apiMethodAnnotation.uri());
        return;
    }
    Path pathAnnotation = method.getAnnotation(Path.class);
    if (pathAnnotation == null) {
        apiMethod.setUri(basePath);
    } else {
        apiMethod.setUri(basePath + pathAnnotation.value());
    }
}
/**
 * Sets the http method (Get, Post, etc.) on the api method description. An explicit
 * type declared on the api annotation takes precedence; otherwise the method's
 * annotations are scanned for one matching the OPTIONS pattern and its alphanumeric
 * name is used.
 *
 * @param apiMethod the api method description to set the type on
 * @param method the reflected service method being documented
 * @param apiMethodAnnotation the api annotation on the method
 */
void setMethodType(final ApiMethod apiMethod, final Method method, final ikube.web.service.Api apiMethodAnnotation) {
    // The explicitly declared type overrides any discovered annotation
    if (StringUtils.isNotEmpty(apiMethodAnnotation.type())) {
        apiMethod.setMethod(apiMethodAnnotation.type());
        return;
    }
    final Annotation[] methodAnnotations = method.getAnnotations();
    if (methodAnnotations == null) {
        return;
    }
    // Scan the annotations for a http verb, e.g. @GET, @POST, and strip it to the bare name
    for (final Annotation methodAnnotation : methodAnnotations) {
        final String annotationText = methodAnnotation.toString();
        if (OPTIONS.matcher(annotationText).matches()) {
            apiMethod.setMethod(STRING.stripToAlphaNumeric(annotationText));
        }
    }
}
/**
 * Sets the media types that the method consumes and produces on the api method
 * description. Method level {@link Consumes}/{@link Produces} annotations override
 * the class level defaults passed in as parameters.
 *
 * @param apiMethod the api method description to set the media types on
 * @param method the reflected service method being documented
 * @param consumesType the class level default consumes media type string
 * @param producesType the class level default produces media type string
 */
void setConsumesAndProducesTypes(final ApiMethod apiMethod, final Method method, final String consumesType, final String producesType) {
    // Prefer the method level annotations, falling back to the class level defaults
    apiMethod.setConsumesType(method.isAnnotationPresent(Consumes.class)
            ? Arrays.toString(method.getAnnotation(Consumes.class).value())
            : consumesType);
    apiMethod.setProducesType(method.isAnnotationPresent(Produces.class)
            ? Arrays.toString(method.getAnnotation(Produces.class).value())
            : producesType);
}
/**
 * Populates the example 'consumes' (input) object(s) on the api method description.
 *
 * If the annotation declares an explicit consumes class, an instance of that class is
 * created and populated with sample data - unless it is an array, non public, or a Void
 * type, in which case only the class' string representation is used. If no consumes class
 * is declared, sample values are built from the method's parameter types instead.
 *
 * @param apiMethod the api method description to populate
 * @param method the reflected service method being documented
 * @param apiMethodAnnotation the api annotation on the method
 * @throws IllegalAccessException if a sample object's fields can not be populated
 * @throws InstantiationException if a sample object can not be instantiated
 */
void setConsumes(final ApiMethod apiMethod, final Method method, final ikube.web.service.Api apiMethodAnnotation)
throws IllegalAccessException, InstantiationException {
if (apiMethodAnnotation.consumes() != Object.class) {
// Arrays, non public classes and Void can not be instantiated, fall back to the class name.
// NOTE(review): primitive void.class is not assignable to Void.class, so a declared 'void'
// consumes type would slip past this guard - confirm that is never declared in practice
if (apiMethodAnnotation.consumes().isArray()
|| !Modifier.isPublic(apiMethodAnnotation.consumes().getModifiers())
|| Void.class.isAssignableFrom(apiMethodAnnotation.consumes())) {
apiMethod.setConsumes(apiMethodAnnotation.consumes().toString());
} else {
apiMethod.setConsumes(populateFields(apiMethodAnnotation.consumes()));
}
} else {
// Here we build the consumes and produces from the parameters and return value
Class<?>[] parameterTypes = method.getParameterTypes();
Object[] parameters = new Object[parameterTypes.length];
for (int i = 0; i < parameterTypes.length; i++) {
Class<?> parameterType = parameterTypes[i];
if (parameterType.isPrimitive()) {
// Primitives get a representative sample value
parameters[i] = getPrimitive(parameterType);
} else {
if (String.class.isAssignableFrom(parameterType)) {
parameters[i] = "string";
} else if (!parameterType.isInterface()) {
parameters[i] = populateFields(parameterType);
}
// NOTE(review): interface typed parameters are left as null entries - confirm intended
}
}
apiMethod.setConsumes(parameters);
}
}
/**
 * Populates the example 'produces' (output) object on the api method description.
 *
 * If the annotation declares an explicit produces class, an instance of that class is
 * created and populated with sample data - unless it is an array, non public, or a Void
 * type, in which case only the class' string representation is used. If no produces class
 * is declared, the method's return type is used, skipping interfaces, Void and raw
 * {@link Response} returns which carry no useful type information.
 *
 * @param apiMethod the api method description to populate
 * @param method the reflected service method being documented
 * @param apiMethodAnnotation the api annotation on the method
 * @throws IllegalAccessException if the sample object's fields can not be populated
 * @throws InstantiationException if the sample object can not be instantiated
 */
void setProduces(final ApiMethod apiMethod, final Method method, final ikube.web.service.Api apiMethodAnnotation)
throws IllegalAccessException, InstantiationException {
if (apiMethodAnnotation.produces() != Object.class) {
// Arrays, non public classes and Void can not be instantiated, fall back to the class name
if (apiMethodAnnotation.produces().isArray()
|| !Modifier.isPublic(apiMethodAnnotation.produces().getModifiers())
|| Void.class.isAssignableFrom(apiMethodAnnotation.produces())) {
apiMethod.setProduces(apiMethodAnnotation.produces().toString());
} else {
apiMethod.setProduces(populateFields(apiMethodAnnotation.produces()));
}
} else {
// Here we build the response somehow, it looks like the only way is to specify it in the annotation
// if the response is not a type but a {@link Response} object, with no typing etc.
// NOTE(review): primitive void.class is not assignable to Void.class, so a primitive void
// return reaches populateFields - presumably getObject returns null there; confirm
Class<?> returnType = method.getReturnType();
if (String.class.isAssignableFrom(returnType)) {
apiMethod.setProduces("string");
} else if (!Void.class.isAssignableFrom(returnType)
&& !Response.class.isAssignableFrom(returnType)
&& !returnType.isInterface()) {
apiMethod.setProduces(populateFields(returnType));
}
}
}
/**
 * Creates an instance of the class and fills its fields with sample data, recursing
 * five levels deep and skipping the 'parent' and 'id' fields.
 *
 * @param clazz the class to instantiate and populate
 * @return the populated sample object, or null if no instance could be created
 * @throws IllegalAccessException if the fields can not be populated
 * @throws InstantiationException if the class can not be instantiated
 */
private Object populateFields(final Class<?> clazz) throws IllegalAccessException, InstantiationException {
    final Object instance = getObject(clazz);
    return instance == null ? null : OBJECT.populateFields(instance, true, 5, "parent", "id");
}
}
| |
/*
* Copyright 2006-2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.consol.citrus.dsl.design;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import com.consol.citrus.TestCase;
import com.consol.citrus.actions.ReceiveMessageAction;
import com.consol.citrus.container.SequenceAfterTest;
import com.consol.citrus.container.SequenceBeforeTest;
import com.consol.citrus.dsl.TestRequest;
import com.consol.citrus.dsl.UnitTestSupport;
import com.consol.citrus.endpoint.Endpoint;
import com.consol.citrus.message.DefaultMessage;
import com.consol.citrus.message.MessageType;
import com.consol.citrus.report.TestActionListeners;
import com.consol.citrus.script.ScriptTypes;
import com.consol.citrus.spi.ReferenceResolver;
import com.consol.citrus.validation.builder.DefaultMessageBuilder;
import com.consol.citrus.validation.builder.StaticMessageBuilder;
import com.consol.citrus.validation.ValidationProcessor;
import com.consol.citrus.validation.context.HeaderValidationContext;
import com.consol.citrus.validation.json.JsonMessageValidationContext;
import com.consol.citrus.validation.json.JsonPathMessageValidationContext;
import com.consol.citrus.validation.json.JsonPathVariableExtractor;
import com.consol.citrus.validation.script.GroovyJsonMessageValidator;
import com.consol.citrus.validation.script.ScriptValidationContext;
import com.consol.citrus.validation.text.PlainTextMessageValidator;
import com.consol.citrus.validation.xml.XmlMessageValidationContext;
import com.consol.citrus.validation.xml.XpathMessageValidationContext;
import com.consol.citrus.validation.xml.XpathPayloadVariableExtractor;
import com.consol.citrus.variable.MessageHeaderVariableExtractor;
import com.consol.citrus.variable.dictionary.DataDictionary;
import com.consol.citrus.variable.dictionary.xml.NodeMappingDataDictionary;
import org.hamcrest.core.StringContains;
import org.mockito.Mockito;
import org.springframework.core.io.ClassPathResource;
import org.springframework.core.io.Resource;
import org.springframework.oxm.Marshaller;
import org.springframework.oxm.xstream.XStreamMarshaller;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import static org.hamcrest.Matchers.containsString;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.when;
/**
* @author Christoph Deppisch
*/
public class ReceiveMessageTestDesignerTest extends UnitTestSupport {
// Mocked endpoint and payload resource shared by all tests in this class
private Endpoint messageEndpoint = Mockito.mock(Endpoint.class);
private Resource resource = Mockito.mock(Resource.class);
// Mocked reference resolver, re-stubbed per test where bean lookups are exercised
private ReferenceResolver referenceResolver = Mockito.mock(ReferenceResolver.class);
private XStreamMarshaller marshaller = new XStreamMarshaller();
/**
 * Registers the XStream annotations of the test model once for the whole class.
 */
@BeforeClass
public void prepareMarshaller() {
marshaller.getXStream().processAnnotations(TestRequest.class);
}
/**
 * A bare receive() with no message configuration should produce a single receive
 * action with XML as the default message type and the three default validation
 * contexts (header, xml, json).
 */
@Test
public void testReceiveEmpty() {
    MockTestDesigner designer = new MockTestDesigner(context) {
        @Override
        public void configure() {
            receive(messageEndpoint);
        }
    };
    designer.configure();

    TestCase testCase = designer.getTestCase();
    Assert.assertEquals(testCase.getActionCount(), 1);
    Assert.assertEquals(testCase.getActions().get(0).getClass(), ReceiveMessageAction.class);

    ReceiveMessageAction receiveAction = (ReceiveMessageAction) testCase.getActions().get(0);
    Assert.assertEquals(receiveAction.getName(), "receive");
    Assert.assertEquals(receiveAction.getMessageType(), MessageType.XML.name());
    Assert.assertEquals(receiveAction.getEndpoint(), messageEndpoint);
    Assert.assertEquals(receiveAction.getValidationContexts().size(), 3);
    Assert.assertTrue(receiveAction.getValidationContexts().stream().anyMatch(HeaderValidationContext.class::isInstance));
    Assert.assertTrue(receiveAction.getValidationContexts().stream().anyMatch(XmlMessageValidationContext.class::isInstance));
    Assert.assertTrue(receiveAction.getValidationContexts().stream().anyMatch(JsonMessageValidationContext.class::isInstance));
}
/**
 * Receiving a prebuilt message object should yield a StaticMessageBuilder that
 * carries the payload and headers of that message unchanged.
 */
@Test
public void testReceiveBuilder() {
MockTestDesigner builder = new MockTestDesigner(context) {
@Override
public void configure() {
receive(messageEndpoint)
.message(new DefaultMessage("Foo").setHeader("operation", "foo"));
}
};
builder.configure();
TestCase test = builder.getTestCase();
Assert.assertEquals(test.getActionCount(), 1);
Assert.assertEquals(test.getActions().get(0).getClass(), ReceiveMessageAction.class);
ReceiveMessageAction action = (ReceiveMessageAction) test.getActions().get(0);
Assert.assertEquals(action.getName(), "receive");
// "Foo" is neither xml nor json, presumably triggering plaintext detection - the
// expected type here differs from the XML default asserted in the other tests
Assert.assertEquals(action.getMessageType(), MessageType.PLAINTEXT.name());
Assert.assertEquals(action.getEndpoint(), messageEndpoint);
Assert.assertEquals(action.getValidationContexts().size(), 3);
Assert.assertTrue(action.getValidationContexts().stream().anyMatch(HeaderValidationContext.class::isInstance));
Assert.assertTrue(action.getValidationContexts().stream().anyMatch(XmlMessageValidationContext.class::isInstance));
Assert.assertTrue(action.getValidationContexts().stream().anyMatch(JsonMessageValidationContext.class::isInstance));
Assert.assertTrue(action.getMessageBuilder() instanceof StaticMessageBuilder);
Assert.assertEquals(((DefaultMessageBuilder)action.getMessageBuilder()).buildMessagePayload(context, action.getMessageType()), "Foo");
Assert.assertNotNull(((StaticMessageBuilder)action.getMessageBuilder()).getMessage().getHeader("operation"));
}
/**
 * A payload model object should be marshalled to xml using the single Marshaller
 * bean resolved from the reference resolver.
 */
@Test
public void testReceiveBuilderWithPayloadModel() {
// Stub the resolver so the designer finds exactly one marshaller bean
reset(referenceResolver);
when(referenceResolver.resolve(TestActionListeners.class)).thenReturn(new TestActionListeners());
when(referenceResolver.resolveAll(SequenceBeforeTest.class)).thenReturn(new HashMap<>());
when(referenceResolver.resolveAll(SequenceAfterTest.class)).thenReturn(new HashMap<>());
when(referenceResolver.resolveAll(Marshaller.class)).thenReturn(Collections.<String, Marshaller>singletonMap("marshaller", marshaller));
when(referenceResolver.resolve(Marshaller.class)).thenReturn(marshaller);
context.setReferenceResolver(referenceResolver);
MockTestDesigner builder = new MockTestDesigner(context) {
@Override
public void configure() {
receive(messageEndpoint)
.payloadModel(new TestRequest("Hello Citrus!"));
}
};
builder.configure();
TestCase test = builder.getTestCase();
Assert.assertEquals(test.getActionCount(), 1);
Assert.assertEquals(test.getActions().get(0).getClass(), ReceiveMessageAction.class);
ReceiveMessageAction action = (ReceiveMessageAction) test.getActions().get(0);
Assert.assertEquals(action.getName(), "receive");
Assert.assertEquals(action.getMessageType(), MessageType.XML.name());
Assert.assertEquals(action.getEndpoint(), messageEndpoint);
Assert.assertEquals(action.getValidationContexts().size(), 3);
Assert.assertTrue(action.getValidationContexts().stream().anyMatch(HeaderValidationContext.class::isInstance));
Assert.assertTrue(action.getValidationContexts().stream().anyMatch(XmlMessageValidationContext.class::isInstance));
Assert.assertTrue(action.getValidationContexts().stream().anyMatch(JsonMessageValidationContext.class::isInstance));
Assert.assertTrue(action.getMessageBuilder() instanceof DefaultMessageBuilder);
// The model should have been marshalled to its xml representation
Assert.assertEquals(((DefaultMessageBuilder)action.getMessageBuilder()).buildMessagePayload(context, action.getMessageType()), "<TestRequest><Message>Hello Citrus!</Message></TestRequest>");
}
/**
 * Passing the marshaller instance explicitly should bypass resolver lookup and
 * still marshal the payload model to xml.
 */
@Test
public void testReceiveBuilderWithPayloadModelExplicitMarshaller() {
MockTestDesigner builder = new MockTestDesigner(context) {
@Override
public void configure() {
receive(messageEndpoint)
.payload(new TestRequest("Hello Citrus!"), marshaller);
}
};
builder.configure();
TestCase test = builder.getTestCase();
Assert.assertEquals(test.getActionCount(), 1);
Assert.assertEquals(test.getActions().get(0).getClass(), ReceiveMessageAction.class);
ReceiveMessageAction action = (ReceiveMessageAction) test.getActions().get(0);
Assert.assertEquals(action.getName(), "receive");
Assert.assertEquals(action.getMessageType(), MessageType.XML.name());
Assert.assertEquals(action.getEndpoint(), messageEndpoint);
Assert.assertEquals(action.getValidationContexts().size(), 3);
Assert.assertTrue(action.getValidationContexts().stream().anyMatch(HeaderValidationContext.class::isInstance));
Assert.assertTrue(action.getValidationContexts().stream().anyMatch(XmlMessageValidationContext.class::isInstance));
Assert.assertTrue(action.getValidationContexts().stream().anyMatch(JsonMessageValidationContext.class::isInstance));
Assert.assertTrue(action.getMessageBuilder() instanceof DefaultMessageBuilder);
Assert.assertEquals(((DefaultMessageBuilder)action.getMessageBuilder()).buildMessagePayload(context, action.getMessageType()), "<TestRequest><Message>Hello Citrus!</Message></TestRequest>");
}
/**
 * Referencing the marshaller by bean name should resolve it from the reference
 * resolver and marshal the payload model to xml.
 */
@Test
public void testReceiveBuilderWithPayloadModelExplicitMarshallerName() {
// Stub the resolver so the named marshaller bean is resolvable
reset(referenceResolver);
when(referenceResolver.resolve(TestActionListeners.class)).thenReturn(new TestActionListeners());
when(referenceResolver.resolveAll(SequenceBeforeTest.class)).thenReturn(new HashMap<>());
when(referenceResolver.resolveAll(SequenceAfterTest.class)).thenReturn(new HashMap<>());
when(referenceResolver.isResolvable("myMarshaller")).thenReturn(true);
when(referenceResolver.resolve("myMarshaller", Marshaller.class)).thenReturn(marshaller);
context.setReferenceResolver(referenceResolver);
MockTestDesigner builder = new MockTestDesigner(context) {
@Override
public void configure() {
receive(messageEndpoint)
.payload(new TestRequest("Hello Citrus!"), "myMarshaller");
}
};
builder.configure();
TestCase test = builder.getTestCase();
Assert.assertEquals(test.getActionCount(), 1);
Assert.assertEquals(test.getActions().get(0).getClass(), ReceiveMessageAction.class);
ReceiveMessageAction action = (ReceiveMessageAction) test.getActions().get(0);
Assert.assertEquals(action.getName(), "receive");
Assert.assertEquals(action.getMessageType(), MessageType.XML.name());
Assert.assertEquals(action.getEndpoint(), messageEndpoint);
Assert.assertEquals(action.getValidationContexts().size(), 3);
Assert.assertTrue(action.getValidationContexts().stream().anyMatch(HeaderValidationContext.class::isInstance));
Assert.assertTrue(action.getValidationContexts().stream().anyMatch(XmlMessageValidationContext.class::isInstance));
Assert.assertTrue(action.getValidationContexts().stream().anyMatch(JsonMessageValidationContext.class::isInstance));
Assert.assertTrue(action.getMessageBuilder() instanceof DefaultMessageBuilder);
Assert.assertEquals(((DefaultMessageBuilder)action.getMessageBuilder()).buildMessagePayload(context, action.getMessageType()), "<TestRequest><Message>Hello Citrus!</Message></TestRequest>");
}
/**
 * A plain string payload should be carried verbatim by a DefaultMessageBuilder
 * on a single receive action with the XML default message type.
 */
@Test
public void testReceiveBuilderWithPayloadString() {
    MockTestDesigner designer = new MockTestDesigner(context) {
        @Override
        public void configure() {
            receive(messageEndpoint)
                    .payload("<TestRequest><Message>Hello World!</Message></TestRequest>");
        }
    };
    designer.configure();

    TestCase testCase = designer.getTestCase();
    Assert.assertEquals(testCase.getActionCount(), 1);
    Assert.assertEquals(testCase.getActions().get(0).getClass(), ReceiveMessageAction.class);

    ReceiveMessageAction receiveAction = (ReceiveMessageAction) testCase.getActions().get(0);
    Assert.assertEquals(receiveAction.getName(), "receive");
    Assert.assertEquals(receiveAction.getMessageType(), MessageType.XML.name());
    Assert.assertEquals(receiveAction.getEndpoint(), messageEndpoint);
    Assert.assertEquals(receiveAction.getValidationContexts().size(), 3);
    Assert.assertTrue(receiveAction.getValidationContexts().stream().anyMatch(HeaderValidationContext.class::isInstance));
    Assert.assertTrue(receiveAction.getValidationContexts().stream().anyMatch(XmlMessageValidationContext.class::isInstance));
    Assert.assertTrue(receiveAction.getValidationContexts().stream().anyMatch(JsonMessageValidationContext.class::isInstance));
    Assert.assertTrue(receiveAction.getMessageBuilder() instanceof DefaultMessageBuilder);
    Assert.assertEquals(((DefaultMessageBuilder) receiveAction.getMessageBuilder()).buildMessagePayload(context, receiveAction.getMessageType()), "<TestRequest><Message>Hello World!</Message></TestRequest>");
}
/**
 * A payload given as a Spring Resource should be read into the message builder
 * as its string content.
 */
@Test
public void testReceiveBuilderWithPayloadResource() throws IOException {
MockTestDesigner builder = new MockTestDesigner(context) {
@Override
public void configure() {
receive(messageEndpoint)
.payload(resource);
}
};
// The stubbed stream is one-shot, so it must be (re)stubbed before configure() reads it
reset(resource);
when(resource.getInputStream()).thenReturn(new ByteArrayInputStream("somePayload".getBytes()));
builder.configure();
TestCase test = builder.getTestCase();
Assert.assertEquals(test.getActionCount(), 1);
Assert.assertEquals(test.getActions().get(0).getClass(), ReceiveMessageAction.class);
ReceiveMessageAction action = (ReceiveMessageAction) test.getActions().get(0);
Assert.assertEquals(action.getName(), "receive");
Assert.assertEquals(action.getMessageType(), MessageType.XML.name());
Assert.assertEquals(action.getEndpoint(), messageEndpoint);
Assert.assertEquals(action.getValidationContexts().size(), 3);
Assert.assertTrue(action.getValidationContexts().stream().anyMatch(HeaderValidationContext.class::isInstance));
Assert.assertTrue(action.getValidationContexts().stream().anyMatch(XmlMessageValidationContext.class::isInstance));
Assert.assertTrue(action.getValidationContexts().stream().anyMatch(JsonMessageValidationContext.class::isInstance));
Assert.assertTrue(action.getMessageBuilder() instanceof DefaultMessageBuilder);
Assert.assertEquals(((DefaultMessageBuilder)action.getMessageBuilder()).buildMessagePayload(context, action.getMessageType()), "somePayload");
}
/**
 * Receiving via an endpoint bean name should record the name as the endpoint uri
 * on the action instead of an endpoint instance.
 */
@Test
public void testReceiveBuilderWithEndpointName() {
reset(referenceResolver);
when(referenceResolver.resolve(TestActionListeners.class)).thenReturn(new TestActionListeners());
when(referenceResolver.resolveAll(SequenceBeforeTest.class)).thenReturn(new HashMap<>());
when(referenceResolver.resolveAll(SequenceAfterTest.class)).thenReturn(new HashMap<>());
context.setReferenceResolver(referenceResolver);
MockTestDesigner builder = new MockTestDesigner(context) {
@Override
public void configure() {
receive("fooMessageEndpoint")
.payload("<TestRequest><Message>Hello World!</Message></TestRequest>");
}
};
builder.configure();
TestCase test = builder.getTestCase();
Assert.assertEquals(test.getActionCount(), 1);
Assert.assertEquals(test.getActions().get(0).getClass(), ReceiveMessageAction.class);
ReceiveMessageAction action = (ReceiveMessageAction) test.getActions().get(0);
Assert.assertEquals(action.getName(), "receive");
// The endpoint is referenced by name only, resolved later at execution time
Assert.assertEquals(action.getEndpointUri(), "fooMessageEndpoint");
Assert.assertEquals(action.getMessageType(), MessageType.XML.name());
}
/**
 * The timeout() configuration should be recorded as the receive timeout on the
 * resulting action.
 */
@Test
public void testReceiveBuilderWithTimeout() {
    MockTestDesigner designer = new MockTestDesigner(context) {
        @Override
        public void configure() {
            receive(messageEndpoint)
                    .payload("<TestRequest><Message>Hello World!</Message></TestRequest>")
                    .timeout(1000L);
        }
    };
    designer.configure();

    TestCase testCase = designer.getTestCase();
    Assert.assertEquals(testCase.getActionCount(), 1);
    Assert.assertEquals(testCase.getActions().get(0).getClass(), ReceiveMessageAction.class);

    ReceiveMessageAction receiveAction = (ReceiveMessageAction) testCase.getActions().get(0);
    Assert.assertEquals(receiveAction.getName(), "receive");
    Assert.assertEquals(receiveAction.getEndpoint(), messageEndpoint);
    Assert.assertEquals(receiveAction.getReceiveTimeout(), 1000L);
}
/**
 * Header values should be collected by the message builder regardless of whether
 * headers() and header() are called before or after payload().
 */
@Test
public void testReceiveBuilderWithHeaders() {
MockTestDesigner builder = new MockTestDesigner(context) {
@Override
public void configure() {
// Headers set after the payload
receive(messageEndpoint)
.payload("<TestRequest><Message>Hello World!</Message></TestRequest>")
.headers(Collections.singletonMap("some", "value"))
.header("operation", "sayHello")
.header("foo", "bar");
// Headers set before the payload - the result must be the same
receive(messageEndpoint)
.header("operation", "sayHello")
.header("foo", "bar")
.headers(Collections.singletonMap("some", "value"))
.payload("<TestRequest><Message>Hello World!</Message></TestRequest>");
}
};
builder.configure();
TestCase test = builder.getTestCase();
Assert.assertEquals(test.getActionCount(), 2);
Assert.assertEquals(test.getActions().get(0).getClass(), ReceiveMessageAction.class);
Assert.assertEquals(test.getActions().get(1).getClass(), ReceiveMessageAction.class);
ReceiveMessageAction action = (ReceiveMessageAction) test.getActions().get(0);
Assert.assertEquals(action.getName(), "receive");
Assert.assertEquals(action.getEndpoint(), messageEndpoint);
Assert.assertEquals(action.getMessageType(), MessageType.XML.name());
Assert.assertTrue(action.getMessageBuilder() instanceof DefaultMessageBuilder);
Assert.assertEquals(((DefaultMessageBuilder)action.getMessageBuilder()).buildMessagePayload(context, action.getMessageType()), "<TestRequest><Message>Hello World!</Message></TestRequest>");
Assert.assertTrue(((DefaultMessageBuilder)action.getMessageBuilder()).buildMessageHeaders(context).containsKey("some"));
Assert.assertTrue(((DefaultMessageBuilder)action.getMessageBuilder()).buildMessageHeaders(context).containsKey("operation"));
Assert.assertTrue(((DefaultMessageBuilder)action.getMessageBuilder()).buildMessageHeaders(context).containsKey("foo"));
action = (ReceiveMessageAction) test.getActions().get(1);
Assert.assertEquals(action.getName(), "receive");
Assert.assertEquals(action.getEndpoint(), messageEndpoint);
Assert.assertEquals(action.getMessageType(), MessageType.XML.name());
Assert.assertTrue(action.getMessageBuilder() instanceof DefaultMessageBuilder);
Assert.assertEquals(((DefaultMessageBuilder)action.getMessageBuilder()).buildMessagePayload(context, action.getMessageType()), "<TestRequest><Message>Hello World!</Message></TestRequest>");
Assert.assertTrue(((DefaultMessageBuilder)action.getMessageBuilder()).buildMessageHeaders(context).containsKey("some"));
Assert.assertTrue(((DefaultMessageBuilder)action.getMessageBuilder()).buildMessageHeaders(context).containsKey("operation"));
Assert.assertTrue(((DefaultMessageBuilder)action.getMessageBuilder()).buildMessageHeaders(context).containsKey("foo"));
}
/**
 * A header data fragment should be recorded by both the DefaultMessageBuilder
 * (string payload) and the StaticMessageBuilder (prebuilt message) variants.
 */
@Test
public void testReceiveBuilderWithHeaderData() {
MockTestDesigner builder = new MockTestDesigner(context) {
@Override
public void configure() {
receive(messageEndpoint)
.payload("<TestRequest><Message>Hello World!</Message></TestRequest>")
.header("<Header><Name>operation</Name><Value>foo</Value></Header>");
receive(messageEndpoint)
.message(new DefaultMessage("<TestRequest><Message>Hello World!</Message></TestRequest>"))
.header("<Header><Name>operation</Name><Value>foo</Value></Header>");
}
};
builder.configure();
TestCase test = builder.getTestCase();
Assert.assertEquals(test.getActionCount(), 2);
Assert.assertEquals(test.getActions().get(0).getClass(), ReceiveMessageAction.class);
Assert.assertEquals(test.getActions().get(1).getClass(), ReceiveMessageAction.class);
ReceiveMessageAction action = (ReceiveMessageAction) test.getActions().get(0);
Assert.assertEquals(action.getName(), "receive");
Assert.assertEquals(action.getEndpoint(), messageEndpoint);
Assert.assertEquals(action.getMessageType(), MessageType.XML.name());
Assert.assertTrue(action.getMessageBuilder() instanceof DefaultMessageBuilder);
Assert.assertEquals(((DefaultMessageBuilder)action.getMessageBuilder()).buildMessagePayload(context, action.getMessageType()), "<TestRequest><Message>Hello World!</Message></TestRequest>");
Assert.assertEquals(((DefaultMessageBuilder)action.getMessageBuilder()).buildMessageHeaderData(context).size(), 1L);
Assert.assertEquals(((DefaultMessageBuilder)action.getMessageBuilder()).buildMessageHeaderData(context).get(0), "<Header><Name>operation</Name><Value>foo</Value></Header>");
action = (ReceiveMessageAction) test.getActions().get(1);
Assert.assertEquals(action.getName(), "receive");
Assert.assertEquals(action.getEndpoint(), messageEndpoint);
Assert.assertEquals(action.getMessageType(), MessageType.XML.name());
Assert.assertTrue(action.getMessageBuilder() instanceof StaticMessageBuilder);
Assert.assertEquals(((DefaultMessageBuilder)action.getMessageBuilder()).buildMessagePayload(context, action.getMessageType()), "<TestRequest><Message>Hello World!</Message></TestRequest>");
Assert.assertEquals(((StaticMessageBuilder)action.getMessageBuilder()).buildMessageHeaderData(context).size(), 1L);
Assert.assertEquals(((StaticMessageBuilder)action.getMessageBuilder()).buildMessageHeaderData(context).get(0), "<Header><Name>operation</Name><Value>foo</Value></Header>");
}
/**
 * Multiple header data fragments should be collected in call order by both
 * message builder variants.
 */
@Test
public void testReceiveBuilderWithMultipleHeaderData() {
MockTestDesigner builder = new MockTestDesigner(context) {
@Override
public void configure() {
receive(messageEndpoint)
.payload("<TestRequest><Message>Hello World!</Message></TestRequest>")
.header("<Header><Name>operation</Name><Value>foo1</Value></Header>")
.header("<Header><Name>operation</Name><Value>foo2</Value></Header>");
receive(messageEndpoint)
.message(new DefaultMessage("<TestRequest><Message>Hello World!</Message></TestRequest>"))
.header("<Header><Name>operation</Name><Value>foo1</Value></Header>")
.header("<Header><Name>operation</Name><Value>foo2</Value></Header>");
}
};
builder.configure();
TestCase test = builder.getTestCase();
Assert.assertEquals(test.getActionCount(), 2);
Assert.assertEquals(test.getActions().get(0).getClass(), ReceiveMessageAction.class);
Assert.assertEquals(test.getActions().get(1).getClass(), ReceiveMessageAction.class);
ReceiveMessageAction action = (ReceiveMessageAction) test.getActions().get(0);
Assert.assertEquals(action.getName(), "receive");
Assert.assertEquals(action.getEndpoint(), messageEndpoint);
Assert.assertEquals(action.getMessageType(), MessageType.XML.name());
Assert.assertTrue(action.getMessageBuilder() instanceof DefaultMessageBuilder);
Assert.assertEquals(((DefaultMessageBuilder)action.getMessageBuilder()).buildMessagePayload(context, action.getMessageType()), "<TestRequest><Message>Hello World!</Message></TestRequest>");
// Fragments must be preserved in the order they were added
Assert.assertEquals(((DefaultMessageBuilder)action.getMessageBuilder()).buildMessageHeaderData(context).size(), 2L);
Assert.assertEquals(((DefaultMessageBuilder)action.getMessageBuilder()).buildMessageHeaderData(context).get(0), "<Header><Name>operation</Name><Value>foo1</Value></Header>");
Assert.assertEquals(((DefaultMessageBuilder)action.getMessageBuilder()).buildMessageHeaderData(context).get(1), "<Header><Name>operation</Name><Value>foo2</Value></Header>");
action = (ReceiveMessageAction) test.getActions().get(1);
Assert.assertEquals(action.getName(), "receive");
Assert.assertEquals(action.getEndpoint(), messageEndpoint);
Assert.assertEquals(action.getMessageType(), MessageType.XML.name());
Assert.assertTrue(action.getMessageBuilder() instanceof StaticMessageBuilder);
Assert.assertEquals(((DefaultMessageBuilder)action.getMessageBuilder()).buildMessagePayload(context, action.getMessageType()), "<TestRequest><Message>Hello World!</Message></TestRequest>");
Assert.assertEquals(((StaticMessageBuilder)action.getMessageBuilder()).buildMessageHeaderData(context).size(), 2L);
Assert.assertEquals(((StaticMessageBuilder)action.getMessageBuilder()).buildMessageHeaderData(context).get(0), "<Header><Name>operation</Name><Value>foo1</Value></Header>");
Assert.assertEquals(((StaticMessageBuilder)action.getMessageBuilder()).buildMessageHeaderData(context).get(1), "<Header><Name>operation</Name><Value>foo2</Value></Header>");
}
/**
 * A header fragment model object should be marshalled to xml header data using
 * the single Marshaller bean resolved from the reference resolver.
 */
@Test
public void testReceiveBuilderWithHeaderFragment() {
// Stub the resolver so the designer finds exactly one marshaller bean
reset(referenceResolver);
when(referenceResolver.resolve(TestActionListeners.class)).thenReturn(new TestActionListeners());
when(referenceResolver.resolveAll(SequenceBeforeTest.class)).thenReturn(new HashMap<>());
when(referenceResolver.resolveAll(SequenceAfterTest.class)).thenReturn(new HashMap<>());
when(referenceResolver.resolveAll(Marshaller.class)).thenReturn(Collections.<String, Marshaller>singletonMap("marshaller", marshaller));
when(referenceResolver.resolve(Marshaller.class)).thenReturn(marshaller);
context.setReferenceResolver(referenceResolver);
MockTestDesigner builder = new MockTestDesigner(context) {
@Override
public void configure() {
receive(messageEndpoint)
.headerFragment(new TestRequest("Hello Citrus!"));
}
};
builder.configure();
TestCase test = builder.getTestCase();
Assert.assertEquals(test.getActionCount(), 1);
Assert.assertEquals(test.getActions().get(0).getClass(), ReceiveMessageAction.class);
ReceiveMessageAction action = (ReceiveMessageAction) test.getActions().get(0);
Assert.assertEquals(action.getName(), "receive");
Assert.assertEquals(action.getMessageType(), MessageType.XML.name());
Assert.assertEquals(action.getEndpoint(), messageEndpoint);
Assert.assertEquals(action.getValidationContexts().size(), 3);
Assert.assertTrue(action.getValidationContexts().stream().anyMatch(HeaderValidationContext.class::isInstance));
Assert.assertTrue(action.getValidationContexts().stream().anyMatch(XmlMessageValidationContext.class::isInstance));
Assert.assertTrue(action.getValidationContexts().stream().anyMatch(JsonMessageValidationContext.class::isInstance));
Assert.assertTrue(action.getMessageBuilder() instanceof DefaultMessageBuilder);
Assert.assertEquals(((DefaultMessageBuilder)action.getMessageBuilder()).buildMessageHeaderData(context).size(), 1L);
Assert.assertEquals(((DefaultMessageBuilder)action.getMessageBuilder()).buildMessageHeaderData(context).get(0), "<TestRequest><Message>Hello Citrus!</Message></TestRequest>");
}
/**
 * Passing the marshaller instance explicitly to headerFragment() should bypass
 * resolver lookup and still produce the marshalled xml header data.
 */
@Test
public void testReceiveBuilderWithHeaderFragmentExplicitMarshaller() {
MockTestDesigner builder = new MockTestDesigner(context) {
@Override
public void configure() {
receive(messageEndpoint)
.headerFragment(new TestRequest("Hello Citrus!"), marshaller);
}
};
builder.configure();
TestCase test = builder.getTestCase();
Assert.assertEquals(test.getActionCount(), 1);
Assert.assertEquals(test.getActions().get(0).getClass(), ReceiveMessageAction.class);
ReceiveMessageAction action = (ReceiveMessageAction) test.getActions().get(0);
Assert.assertEquals(action.getName(), "receive");
Assert.assertEquals(action.getMessageType(), MessageType.XML.name());
Assert.assertEquals(action.getEndpoint(), messageEndpoint);
Assert.assertEquals(action.getValidationContexts().size(), 3);
Assert.assertTrue(action.getValidationContexts().stream().anyMatch(HeaderValidationContext.class::isInstance));
Assert.assertTrue(action.getValidationContexts().stream().anyMatch(XmlMessageValidationContext.class::isInstance));
Assert.assertTrue(action.getValidationContexts().stream().anyMatch(JsonMessageValidationContext.class::isInstance));
Assert.assertTrue(action.getMessageBuilder() instanceof DefaultMessageBuilder);
Assert.assertEquals(((DefaultMessageBuilder)action.getMessageBuilder()).buildMessageHeaderData(context).size(), 1L);
Assert.assertEquals(((DefaultMessageBuilder)action.getMessageBuilder()).buildMessageHeaderData(context).get(0), "<TestRequest><Message>Hello Citrus!</Message></TestRequest>");
}
/**
 * Referencing the marshaller by bean name for headerFragment() should resolve it
 * from the reference resolver and produce the marshalled xml header data.
 */
@Test
public void testReceiveBuilderWithHeaderFragmentExplicitMarshallerName() {
// Stub the resolver so the named marshaller bean is resolvable
reset(referenceResolver);
when(referenceResolver.resolve(TestActionListeners.class)).thenReturn(new TestActionListeners());
when(referenceResolver.resolveAll(SequenceBeforeTest.class)).thenReturn(new HashMap<>());
when(referenceResolver.resolveAll(SequenceAfterTest.class)).thenReturn(new HashMap<>());
when(referenceResolver.isResolvable("myMarshaller")).thenReturn(true);
when(referenceResolver.resolve("myMarshaller", Marshaller.class)).thenReturn(marshaller);
context.setReferenceResolver(referenceResolver);
MockTestDesigner builder = new MockTestDesigner(context) {
@Override
public void configure() {
receive(messageEndpoint)
.headerFragment(new TestRequest("Hello Citrus!"), "myMarshaller");
}
};
builder.configure();
TestCase test = builder.getTestCase();
Assert.assertEquals(test.getActionCount(), 1);
Assert.assertEquals(test.getActions().get(0).getClass(), ReceiveMessageAction.class);
ReceiveMessageAction action = (ReceiveMessageAction) test.getActions().get(0);
Assert.assertEquals(action.getName(), "receive");
Assert.assertEquals(action.getMessageType(), MessageType.XML.name());
Assert.assertEquals(action.getEndpoint(), messageEndpoint);
Assert.assertEquals(action.getValidationContexts().size(), 3);
Assert.assertTrue(action.getValidationContexts().stream().anyMatch(HeaderValidationContext.class::isInstance));
Assert.assertTrue(action.getValidationContexts().stream().anyMatch(XmlMessageValidationContext.class::isInstance));
Assert.assertTrue(action.getValidationContexts().stream().anyMatch(JsonMessageValidationContext.class::isInstance));
Assert.assertTrue(action.getMessageBuilder() instanceof DefaultMessageBuilder);
Assert.assertEquals(((DefaultMessageBuilder)action.getMessageBuilder()).buildMessageHeaderData(context).size(), 1L);
Assert.assertEquals(((DefaultMessageBuilder)action.getMessageBuilder()).buildMessageHeaderData(context).get(0), "<TestRequest><Message>Hello Citrus!</Message></TestRequest>");
}
/**
 * Verifies that header(resource) reads header data from the given resource for both a
 * payload-based receive (DefaultMessageBuilder) and a message-based receive
 * (StaticMessageBuilder). The mocked resource yields different content per call so the
 * two actions can be told apart.
 */
@Test
public void testReceiveBuilderWithHeaderResource() throws IOException {
    MockTestDesigner builder = new MockTestDesigner(context) {
        @Override
        public void configure() {
            receive(messageEndpoint)
                    .payload("<TestRequest><Message>Hello World!</Message></TestRequest>")
                    .header(resource);

            receive(messageEndpoint)
                    .message(new DefaultMessage("<TestRequest><Message>Hello World!</Message></TestRequest>"))
                    .header(resource);
        }
    };

    reset(resource);
    // First call feeds the first receive action, second call the second one.
    when(resource.getInputStream()).thenReturn(new ByteArrayInputStream("someHeaderData".getBytes()))
            .thenReturn(new ByteArrayInputStream("otherHeaderData".getBytes()));
    builder.configure();

    TestCase test = builder.getTestCase();
    Assert.assertEquals(test.getActionCount(), 2);
    Assert.assertEquals(test.getActions().get(0).getClass(), ReceiveMessageAction.class);
    Assert.assertEquals(test.getActions().get(1).getClass(), ReceiveMessageAction.class);

    ReceiveMessageAction action = (ReceiveMessageAction) test.getActions().get(0);
    Assert.assertEquals(action.getName(), "receive");
    Assert.assertEquals(action.getEndpoint(), messageEndpoint);
    Assert.assertEquals(action.getMessageType(), MessageType.XML.name());
    Assert.assertTrue(action.getMessageBuilder() instanceof DefaultMessageBuilder);
    Assert.assertEquals(((DefaultMessageBuilder)action.getMessageBuilder()).buildMessagePayload(context, action.getMessageType()), "<TestRequest><Message>Hello World!</Message></TestRequest>");
    Assert.assertEquals(((DefaultMessageBuilder)action.getMessageBuilder()).buildMessageHeaderData(context).size(), 1L);
    Assert.assertEquals(((DefaultMessageBuilder)action.getMessageBuilder()).buildMessageHeaderData(context).get(0), "someHeaderData");

    action = (ReceiveMessageAction) test.getActions().get(1);
    Assert.assertEquals(action.getName(), "receive");
    Assert.assertEquals(action.getEndpoint(), messageEndpoint);
    Assert.assertEquals(action.getMessageType(), MessageType.XML.name());
    Assert.assertTrue(action.getMessageBuilder() instanceof StaticMessageBuilder);
    // Cast consistently to StaticMessageBuilder (previously mixed with DefaultMessageBuilder).
    Assert.assertEquals(((StaticMessageBuilder)action.getMessageBuilder()).buildMessagePayload(context, action.getMessageType()), "<TestRequest><Message>Hello World!</Message></TestRequest>");
    Assert.assertEquals(((StaticMessageBuilder)action.getMessageBuilder()).buildMessageHeaderData(context).size(), 1L);
    Assert.assertEquals(((StaticMessageBuilder)action.getMessageBuilder()).buildMessageHeaderData(context).get(0), "otherHeaderData");
}
/**
 * Verifies that an inline header fragment and a resource-based header fragment can be
 * combined, preserving order: inline fragment first, resource content second. Covers
 * both the payload-based (DefaultMessageBuilder) and message-based (StaticMessageBuilder)
 * receive variants.
 */
@Test
public void testReceiveBuilderWithMultipleHeaderResource() throws IOException {
    MockTestDesigner builder = new MockTestDesigner(context) {
        @Override
        public void configure() {
            receive(messageEndpoint)
                    .payload("<TestRequest><Message>Hello World!</Message></TestRequest>")
                    .header("<Header><Name>operation</Name><Value>foo</Value></Header>")
                    .header(resource);

            receive(messageEndpoint)
                    .message(new DefaultMessage("<TestRequest><Message>Hello World!</Message></TestRequest>"))
                    .header("<Header><Name>operation</Name><Value>foo</Value></Header>")
                    .header(resource);
        }
    };

    reset(resource);
    // First call feeds the first receive action, second call the second one.
    when(resource.getInputStream()).thenReturn(new ByteArrayInputStream("someHeaderData".getBytes()))
            .thenReturn(new ByteArrayInputStream("otherHeaderData".getBytes()));
    builder.configure();

    TestCase test = builder.getTestCase();
    Assert.assertEquals(test.getActionCount(), 2);
    Assert.assertEquals(test.getActions().get(0).getClass(), ReceiveMessageAction.class);
    Assert.assertEquals(test.getActions().get(1).getClass(), ReceiveMessageAction.class);

    ReceiveMessageAction action = (ReceiveMessageAction) test.getActions().get(0);
    Assert.assertEquals(action.getName(), "receive");
    Assert.assertEquals(action.getEndpoint(), messageEndpoint);
    Assert.assertEquals(action.getMessageType(), MessageType.XML.name());
    Assert.assertTrue(action.getMessageBuilder() instanceof DefaultMessageBuilder);
    Assert.assertEquals(((DefaultMessageBuilder)action.getMessageBuilder()).buildMessagePayload(context, action.getMessageType()), "<TestRequest><Message>Hello World!</Message></TestRequest>");
    Assert.assertEquals(((DefaultMessageBuilder)action.getMessageBuilder()).buildMessageHeaderData(context).size(), 2L);
    Assert.assertEquals(((DefaultMessageBuilder)action.getMessageBuilder()).buildMessageHeaderData(context).get(0), "<Header><Name>operation</Name><Value>foo</Value></Header>");
    Assert.assertEquals(((DefaultMessageBuilder)action.getMessageBuilder()).buildMessageHeaderData(context).get(1), "someHeaderData");

    action = (ReceiveMessageAction) test.getActions().get(1);
    Assert.assertEquals(action.getName(), "receive");
    Assert.assertEquals(action.getEndpoint(), messageEndpoint);
    Assert.assertEquals(action.getMessageType(), MessageType.XML.name());
    Assert.assertTrue(action.getMessageBuilder() instanceof StaticMessageBuilder);
    // Cast consistently to StaticMessageBuilder (previously mixed with DefaultMessageBuilder).
    Assert.assertEquals(((StaticMessageBuilder)action.getMessageBuilder()).buildMessagePayload(context, action.getMessageType()), "<TestRequest><Message>Hello World!</Message></TestRequest>");
    Assert.assertEquals(((StaticMessageBuilder)action.getMessageBuilder()).buildMessageHeaderData(context).size(), 2L);
    Assert.assertEquals(((StaticMessageBuilder)action.getMessageBuilder()).buildMessageHeaderData(context).get(0), "<Header><Name>operation</Name><Value>foo</Value></Header>");
    Assert.assertEquals(((StaticMessageBuilder)action.getMessageBuilder()).buildMessageHeaderData(context).get(1), "otherHeaderData");
}
/**
 * Verifies that validator(instance) registers the given message validator on the
 * receive action and that payload/headers are carried over untouched.
 */
@Test
public void testReceiveBuilderWithValidator() {
    final PlainTextMessageValidator validator = new PlainTextMessageValidator();

    MockTestDesigner builder = new MockTestDesigner(context) {
        @Override
        public void configure() {
            receive(messageEndpoint)
                    .messageType(MessageType.PLAINTEXT)
                    .payload("TestMessage")
                    .header("operation", "sayHello")
                    .validator(validator);
        }
    };

    builder.configure();

    TestCase test = builder.getTestCase();
    Assert.assertEquals(test.getActionCount(), 1);
    Assert.assertEquals(test.getActions().get(0).getClass(), ReceiveMessageAction.class);

    ReceiveMessageAction action = (ReceiveMessageAction) test.getActions().get(0);
    Assert.assertEquals(action.getName(), "receive");
    Assert.assertEquals(action.getEndpoint(), messageEndpoint);
    Assert.assertEquals(action.getMessageType(), MessageType.PLAINTEXT.name());

    // Exactly the explicitly registered validator must be present.
    Assert.assertEquals(action.getValidators().size(), 1L);
    Assert.assertEquals(action.getValidators().get(0), validator);

    Assert.assertTrue(action.getMessageBuilder() instanceof DefaultMessageBuilder);
    DefaultMessageBuilder messageBuilder = (DefaultMessageBuilder) action.getMessageBuilder();
    Assert.assertEquals(messageBuilder.buildMessagePayload(context, action.getMessageType()), "TestMessage");
    Assert.assertTrue(messageBuilder.buildMessageHeaders(context).containsKey("operation"));
}
/**
 * Verifies that validator(name) resolves the message validator from the reference
 * resolver by bean name and registers it on the receive action.
 */
@Test
public void testReceiveBuilderWithValidatorName() {
    final PlainTextMessageValidator validator = new PlainTextMessageValidator();

    reset(referenceResolver);
    when(referenceResolver.resolve("plainTextValidator")).thenReturn(validator);
    when(referenceResolver.resolve(TestActionListeners.class)).thenReturn(new TestActionListeners());
    when(referenceResolver.resolveAll(SequenceBeforeTest.class)).thenReturn(new HashMap<>());
    when(referenceResolver.resolveAll(SequenceAfterTest.class)).thenReturn(new HashMap<>());
    context.setReferenceResolver(referenceResolver);

    MockTestDesigner builder = new MockTestDesigner(context) {
        @Override
        public void configure() {
            receive(messageEndpoint)
                    .messageType(MessageType.PLAINTEXT)
                    .payload("TestMessage")
                    .header("operation", "sayHello")
                    .validator("plainTextValidator");
        }
    };

    builder.configure();

    TestCase test = builder.getTestCase();
    Assert.assertEquals(test.getActionCount(), 1);
    Assert.assertEquals(test.getActions().get(0).getClass(), ReceiveMessageAction.class);

    ReceiveMessageAction action = (ReceiveMessageAction) test.getActions().get(0);
    Assert.assertEquals(action.getName(), "receive");
    Assert.assertEquals(action.getEndpoint(), messageEndpoint);
    Assert.assertEquals(action.getMessageType(), MessageType.PLAINTEXT.name());

    // The validator resolved by name must be the one registered on the action.
    Assert.assertEquals(action.getValidators().size(), 1L);
    Assert.assertEquals(action.getValidators().get(0), validator);

    Assert.assertTrue(action.getMessageBuilder() instanceof DefaultMessageBuilder);
    DefaultMessageBuilder messageBuilder = (DefaultMessageBuilder) action.getMessageBuilder();
    Assert.assertEquals(messageBuilder.buildMessagePayload(context, action.getMessageType()), "TestMessage");
    Assert.assertTrue(messageBuilder.buildMessageHeaders(context).containsKey("operation"));
}
/**
 * Verifies that dictionary(instance) attaches the given data dictionary to the
 * receive action without affecting payload or headers.
 */
@Test
public void testReceiveBuilderWithDictionary() {
    final DataDictionary<?> dictionary = new NodeMappingDataDictionary();

    reset(referenceResolver);
    when(referenceResolver.resolve(TestActionListeners.class)).thenReturn(new TestActionListeners());
    when(referenceResolver.resolveAll(SequenceBeforeTest.class)).thenReturn(new HashMap<>());
    when(referenceResolver.resolveAll(SequenceAfterTest.class)).thenReturn(new HashMap<>());
    context.setReferenceResolver(referenceResolver);

    MockTestDesigner builder = new MockTestDesigner(context) {
        @Override
        public void configure() {
            receive(messageEndpoint)
                    .messageType(MessageType.PLAINTEXT)
                    .payload("TestMessage")
                    .header("operation", "sayHello")
                    .dictionary(dictionary);
        }
    };

    builder.configure();

    TestCase test = builder.getTestCase();
    Assert.assertEquals(test.getActionCount(), 1);
    Assert.assertEquals(test.getActions().get(0).getClass(), ReceiveMessageAction.class);

    ReceiveMessageAction action = (ReceiveMessageAction) test.getActions().get(0);
    Assert.assertEquals(action.getName(), "receive");
    Assert.assertEquals(action.getEndpoint(), messageEndpoint);
    Assert.assertEquals(action.getMessageType(), MessageType.PLAINTEXT.name());
    Assert.assertEquals(action.getDataDictionary(), dictionary);

    Assert.assertTrue(action.getMessageBuilder() instanceof DefaultMessageBuilder);
    DefaultMessageBuilder messageBuilder = (DefaultMessageBuilder) action.getMessageBuilder();
    Assert.assertEquals(messageBuilder.buildMessagePayload(context, action.getMessageType()), "TestMessage");
    Assert.assertTrue(messageBuilder.buildMessageHeaders(context).containsKey("operation"));
}
/**
 * Verifies that dictionary(name) resolves the data dictionary from the reference
 * resolver by bean name and attaches it to the receive action.
 */
@Test
public void testReceiveBuilderWithDictionaryName() {
    final DataDictionary<?> dictionary = new NodeMappingDataDictionary();

    reset(referenceResolver);
    when(referenceResolver.resolve("customDictionary", DataDictionary.class)).thenReturn(dictionary);
    when(referenceResolver.resolve(TestActionListeners.class)).thenReturn(new TestActionListeners());
    when(referenceResolver.resolveAll(SequenceBeforeTest.class)).thenReturn(new HashMap<>());
    when(referenceResolver.resolveAll(SequenceAfterTest.class)).thenReturn(new HashMap<>());
    context.setReferenceResolver(referenceResolver);

    MockTestDesigner builder = new MockTestDesigner(context) {
        @Override
        public void configure() {
            receive(messageEndpoint)
                    .messageType(MessageType.PLAINTEXT)
                    .payload("TestMessage")
                    .header("operation", "sayHello")
                    .dictionary("customDictionary");
        }
    };

    builder.configure();

    TestCase test = builder.getTestCase();
    Assert.assertEquals(test.getActionCount(), 1);
    Assert.assertEquals(test.getActions().get(0).getClass(), ReceiveMessageAction.class);

    ReceiveMessageAction action = (ReceiveMessageAction) test.getActions().get(0);
    Assert.assertEquals(action.getName(), "receive");
    Assert.assertEquals(action.getEndpoint(), messageEndpoint);
    Assert.assertEquals(action.getMessageType(), MessageType.PLAINTEXT.name());
    // The dictionary resolved by name must end up on the action.
    Assert.assertEquals(action.getDataDictionary(), dictionary);

    Assert.assertTrue(action.getMessageBuilder() instanceof DefaultMessageBuilder);
    DefaultMessageBuilder messageBuilder = (DefaultMessageBuilder) action.getMessageBuilder();
    Assert.assertEquals(messageBuilder.buildMessagePayload(context, action.getMessageType()), "TestMessage");
    Assert.assertTrue(messageBuilder.buildMessageHeaders(context).containsKey("operation"));
}
/**
 * Verifies that selector(map) passes the message selector map straight through to
 * the receive action.
 */
@Test
public void testReceiveBuilderWithSelector() {
    final Map<String, String> messageSelector = new HashMap<>();
    messageSelector.put("operation", "sayHello");

    MockTestDesigner builder = new MockTestDesigner(context) {
        @Override
        public void configure() {
            receive(messageEndpoint)
                    .payload("<TestRequest><Message>Hello World!</Message></TestRequest>")
                    .selector(messageSelector);
        }
    };

    builder.configure();

    TestCase test = builder.getTestCase();
    Assert.assertEquals(test.getActionCount(), 1);
    Assert.assertEquals(test.getActions().get(0).getClass(), ReceiveMessageAction.class);

    ReceiveMessageAction receiveAction = (ReceiveMessageAction) test.getActions().get(0);
    Assert.assertEquals(receiveAction.getName(), "receive");
    Assert.assertEquals(receiveAction.getMessageType(), MessageType.XML.name());
    Assert.assertEquals(receiveAction.getEndpoint(), messageEndpoint);
    Assert.assertEquals(receiveAction.getMessageSelectorMap(), messageSelector);
}
/**
 * Verifies that selector(expression) stores the raw selector string on the receive
 * action and leaves the selector map empty.
 */
@Test
public void testReceiveBuilderWithSelectorExpression() {
    MockTestDesigner builder = new MockTestDesigner(context) {
        @Override
        public void configure() {
            receive(messageEndpoint)
                    .payload("<TestRequest><Message>Hello World!</Message></TestRequest>")
                    .selector("operation = 'sayHello'");
        }
    };

    builder.configure();

    TestCase test = builder.getTestCase();
    Assert.assertEquals(test.getActionCount(), 1);
    Assert.assertEquals(test.getActions().get(0).getClass(), ReceiveMessageAction.class);

    ReceiveMessageAction receiveAction = (ReceiveMessageAction) test.getActions().get(0);
    Assert.assertEquals(receiveAction.getName(), "receive");
    Assert.assertEquals(receiveAction.getMessageType(), MessageType.XML.name());
    Assert.assertEquals(receiveAction.getEndpoint(), messageEndpoint);

    // The string form must not populate the selector map.
    Assert.assertTrue(receiveAction.getMessageSelectorMap().isEmpty());
    Assert.assertEquals(receiveAction.getMessageSelector(), "operation = 'sayHello'");
}
/**
 * Verifies that extractFromPayload(xpath, variable) adds one XPath variable
 * extractor per expression, in declaration order.
 */
@Test
public void testReceiveBuilderExtractFromPayload() {
    reset(referenceResolver);
    when(referenceResolver.resolve(TestActionListeners.class)).thenReturn(new TestActionListeners());
    when(referenceResolver.resolveAll(SequenceBeforeTest.class)).thenReturn(new HashMap<>());
    when(referenceResolver.resolveAll(SequenceAfterTest.class)).thenReturn(new HashMap<>());
    context.setReferenceResolver(referenceResolver);

    MockTestDesigner builder = new MockTestDesigner(context) {
        @Override
        public void configure() {
            receive(messageEndpoint)
                    .payload("<TestRequest><Message lang=\"ENG\">Hello World!</Message></TestRequest>")
                    .extractFromPayload("/TestRequest/Message", "text")
                    .extractFromPayload("/TestRequest/Message/@lang", "language");
        }
    };

    builder.configure();

    TestCase test = builder.getTestCase();
    Assert.assertEquals(test.getActionCount(), 1);
    Assert.assertEquals(test.getActions().get(0).getClass(), ReceiveMessageAction.class);

    ReceiveMessageAction action = (ReceiveMessageAction) test.getActions().get(0);
    Assert.assertEquals(action.getName(), "receive");
    Assert.assertEquals(action.getMessageType(), MessageType.XML.name());
    Assert.assertEquals(action.getEndpoint(), messageEndpoint);
    Assert.assertEquals(action.getVariableExtractors().size(), 2);

    Assert.assertTrue(action.getVariableExtractors().get(0) instanceof XpathPayloadVariableExtractor);
    Assert.assertTrue(action.getVariableExtractors().get(1) instanceof XpathPayloadVariableExtractor);
    XpathPayloadVariableExtractor firstExtractor = (XpathPayloadVariableExtractor) action.getVariableExtractors().get(0);
    XpathPayloadVariableExtractor secondExtractor = (XpathPayloadVariableExtractor) action.getVariableExtractors().get(1);
    Assert.assertTrue(firstExtractor.getXpathExpressions().containsKey("/TestRequest/Message"));
    Assert.assertTrue(secondExtractor.getXpathExpressions().containsKey("/TestRequest/Message/@lang"));
}
/**
 * Verifies that extractFromPayload(jsonPath, variable) on a JSON message adds one
 * JsonPath variable extractor per expression, in declaration order.
 */
@Test
public void testReceiveBuilderExtractJsonPathFromPayload() {
    reset(referenceResolver);
    when(referenceResolver.resolve(TestActionListeners.class)).thenReturn(new TestActionListeners());
    when(referenceResolver.resolveAll(SequenceBeforeTest.class)).thenReturn(new HashMap<>());
    when(referenceResolver.resolveAll(SequenceAfterTest.class)).thenReturn(new HashMap<>());
    context.setReferenceResolver(referenceResolver);

    MockTestDesigner builder = new MockTestDesigner(context) {
        @Override
        public void configure() {
            receive(messageEndpoint)
                    .messageType(MessageType.JSON)
                    .payload("{\"text\":\"Hello World!\", \"person\":{\"name\":\"John\",\"surname\":\"Doe\"}, \"index\":5, \"id\":\"x123456789x\"}")
                    .extractFromPayload("$.text", "text")
                    .extractFromPayload("$.person", "person");
        }
    };

    builder.configure();

    TestCase test = builder.getTestCase();
    Assert.assertEquals(test.getActionCount(), 1);
    Assert.assertEquals(test.getActions().get(0).getClass(), ReceiveMessageAction.class);

    ReceiveMessageAction action = (ReceiveMessageAction) test.getActions().get(0);
    Assert.assertEquals(action.getName(), "receive");
    Assert.assertEquals(action.getMessageType(), MessageType.JSON.name());
    Assert.assertEquals(action.getEndpoint(), messageEndpoint);
    Assert.assertEquals(action.getVariableExtractors().size(), 2);

    Assert.assertTrue(action.getVariableExtractors().get(0) instanceof JsonPathVariableExtractor);
    Assert.assertTrue(action.getVariableExtractors().get(1) instanceof JsonPathVariableExtractor);
    JsonPathVariableExtractor firstExtractor = (JsonPathVariableExtractor) action.getVariableExtractors().get(0);
    JsonPathVariableExtractor secondExtractor = (JsonPathVariableExtractor) action.getVariableExtractors().get(1);
    Assert.assertTrue(firstExtractor.getJsonPathExpressions().containsKey("$.text"));
    Assert.assertTrue(secondExtractor.getJsonPathExpressions().containsKey("$.person"));
}
/**
 * Verifies that extractFromHeader(header, variable) adds one message header
 * variable extractor per mapping, in declaration order.
 */
@Test
public void testReceiveBuilderExtractFromHeader() {
    MockTestDesigner builder = new MockTestDesigner(context) {
        @Override
        public void configure() {
            receive(messageEndpoint)
                    .payload("<TestRequest><Message lang=\"ENG\">Hello World!</Message></TestRequest>")
                    .extractFromHeader("operation", "ops")
                    .extractFromHeader("requestId", "id");
        }
    };

    builder.configure();

    TestCase test = builder.getTestCase();
    Assert.assertEquals(test.getActionCount(), 1);
    Assert.assertEquals(test.getActions().get(0).getClass(), ReceiveMessageAction.class);

    ReceiveMessageAction action = (ReceiveMessageAction) test.getActions().get(0);
    Assert.assertEquals(action.getName(), "receive");
    Assert.assertEquals(action.getMessageType(), MessageType.XML.name());
    Assert.assertEquals(action.getEndpoint(), messageEndpoint);
    Assert.assertEquals(action.getVariableExtractors().size(), 2);

    Assert.assertTrue(action.getVariableExtractors().get(0) instanceof MessageHeaderVariableExtractor);
    Assert.assertTrue(action.getVariableExtractors().get(1) instanceof MessageHeaderVariableExtractor);
    MessageHeaderVariableExtractor firstExtractor = (MessageHeaderVariableExtractor) action.getVariableExtractors().get(0);
    MessageHeaderVariableExtractor secondExtractor = (MessageHeaderVariableExtractor) action.getVariableExtractors().get(1);
    Assert.assertTrue(firstExtractor.getHeaderMappings().containsKey("operation"));
    Assert.assertTrue(secondExtractor.getHeaderMappings().containsKey("requestId"));
}
@Test
// Verifies that header and payload extractors can be combined on one receive action:
// header extractors are registered first (positions 0-1), then XPath payload
// extractors (positions 2-3), in declaration order.
public void testReceiveBuilderExtractCombined() {
// Re-arm the resolver mock so only the stubbings below are in effect.
reset(referenceResolver);
when(referenceResolver.resolve(TestActionListeners.class)).thenReturn(new TestActionListeners());
when(referenceResolver.resolveAll(SequenceBeforeTest.class)).thenReturn(new HashMap<>());
when(referenceResolver.resolveAll(SequenceAfterTest.class)).thenReturn(new HashMap<>());
context.setReferenceResolver(referenceResolver);
MockTestDesigner builder = new MockTestDesigner(context) {
@Override
public void configure() {
receive(messageEndpoint)
.payload("<TestRequest><Message lang=\"ENG\">Hello World!</Message></TestRequest>")
.extractFromHeader("operation", "ops")
.extractFromHeader("requestId", "id")
.extractFromPayload("/TestRequest/Message", "text")
.extractFromPayload("/TestRequest/Message/@lang", "language");
}
};
builder.configure();
TestCase test = builder.getTestCase();
Assert.assertEquals(test.getActionCount(), 1);
Assert.assertEquals(test.getActions().get(0).getClass(), ReceiveMessageAction.class);
ReceiveMessageAction action = (ReceiveMessageAction) test.getActions().get(0);
Assert.assertEquals(action.getName(), "receive");
Assert.assertEquals(action.getMessageType(), MessageType.XML.name());
Assert.assertEquals(action.getEndpoint(), messageEndpoint);
// Two header extractors plus two XPath extractors.
Assert.assertEquals(action.getVariableExtractors().size(), 4);
Assert.assertTrue(action.getVariableExtractors().get(0) instanceof MessageHeaderVariableExtractor);
Assert.assertTrue(action.getVariableExtractors().get(1) instanceof MessageHeaderVariableExtractor);
Assert.assertTrue(((MessageHeaderVariableExtractor)action.getVariableExtractors().get(0)).getHeaderMappings().containsKey("operation"));
Assert.assertTrue(((MessageHeaderVariableExtractor)action.getVariableExtractors().get(1)).getHeaderMappings().containsKey("requestId"));
Assert.assertTrue(action.getVariableExtractors().get(2) instanceof XpathPayloadVariableExtractor);
Assert.assertTrue(action.getVariableExtractors().get(3) instanceof XpathPayloadVariableExtractor);
Assert.assertTrue(((XpathPayloadVariableExtractor)action.getVariableExtractors().get(2)).getXpathExpressions().containsKey("/TestRequest/Message"));
Assert.assertTrue(((XpathPayloadVariableExtractor)action.getVariableExtractors().get(3)).getXpathExpressions().containsKey("/TestRequest/Message/@lang"));
}
/**
 * Verifies that validationCallback(processor) registers the given validation
 * processor on the receive action while payload and headers stay intact.
 */
@Test
public void testReceiveBuilderWithValidationProcessor() {
    final ValidationProcessor processor = Mockito.mock(ValidationProcessor.class);

    MockTestDesigner builder = new MockTestDesigner(context) {
        @Override
        public void configure() {
            receive(messageEndpoint)
                    .messageType(MessageType.PLAINTEXT)
                    .payload("TestMessage")
                    .header("operation", "sayHello")
                    .validationCallback(processor);
        }
    };

    builder.configure();

    TestCase test = builder.getTestCase();
    Assert.assertEquals(test.getActionCount(), 1);
    Assert.assertEquals(test.getActions().get(0).getClass(), ReceiveMessageAction.class);

    ReceiveMessageAction action = (ReceiveMessageAction) test.getActions().get(0);
    Assert.assertEquals(action.getName(), "receive");
    Assert.assertEquals(action.getEndpoint(), messageEndpoint);
    Assert.assertEquals(action.getMessageType(), MessageType.PLAINTEXT.name());
    Assert.assertEquals(action.getValidationProcessor(), processor);

    Assert.assertTrue(action.getMessageBuilder() instanceof DefaultMessageBuilder);
    DefaultMessageBuilder messageBuilder = (DefaultMessageBuilder) action.getMessageBuilder();
    Assert.assertEquals(messageBuilder.buildMessagePayload(context, action.getMessageType()), "TestMessage");
    Assert.assertTrue(messageBuilder.buildMessageHeaders(context).containsKey("operation"));
}
@Test
// Verifies that validateScript(script) adds a Groovy ScriptValidationContext carrying
// the inline script and that the named Groovy validator is resolved and registered.
public void testReceiveBuilderWithValidationScript() {
final GroovyJsonMessageValidator validator = new GroovyJsonMessageValidator();
// Re-arm the resolver mock so only the stubbings below are in effect.
reset(referenceResolver);
when(referenceResolver.resolve("groovyMessageValidator")).thenReturn(validator);
when(referenceResolver.resolve(TestActionListeners.class)).thenReturn(new TestActionListeners());
when(referenceResolver.resolveAll(SequenceBeforeTest.class)).thenReturn(new HashMap<>());
when(referenceResolver.resolveAll(SequenceAfterTest.class)).thenReturn(new HashMap<>());
context.setReferenceResolver(referenceResolver);
MockTestDesigner builder = new MockTestDesigner(context) {
@Override
public void configure() {
receive(messageEndpoint)
.messageType(MessageType.JSON)
.validateScript("assert true")
.validator("groovyMessageValidator");
}
};
builder.configure();
TestCase test = builder.getTestCase();
Assert.assertEquals(test.getActionCount(), 1);
Assert.assertEquals(test.getActions().get(0).getClass(), ReceiveMessageAction.class);
ReceiveMessageAction action = (ReceiveMessageAction) test.getActions().get(0);
Assert.assertEquals(action.getName(), "receive");
Assert.assertEquals(action.getEndpoint(), messageEndpoint);
Assert.assertEquals(action.getMessageType(), MessageType.JSON.name());
Assert.assertEquals(action.getValidators().size(), 1L);
Assert.assertEquals(action.getValidators().get(0), validator);
// Header + JSON contexts plus the script context added by validateScript().
Assert.assertEquals(action.getValidationContexts().size(), 3L);
Assert.assertTrue(action.getValidationContexts().stream().anyMatch(HeaderValidationContext.class::isInstance));
Assert.assertTrue(action.getValidationContexts().stream().anyMatch(JsonMessageValidationContext.class::isInstance));
Assert.assertTrue(action.getValidationContexts().stream().anyMatch(ScriptValidationContext.class::isInstance));
ScriptValidationContext validationContext = action.getValidationContexts().stream()
.filter(ScriptValidationContext.class::isInstance).findFirst()
.map(ScriptValidationContext.class::cast)
.orElseThrow(() -> new AssertionError("Missing validation context"));
Assert.assertEquals(validationContext.getScriptType(), ScriptTypes.GROOVY);
// Inline script is stored directly; no resource path is set.
Assert.assertEquals(validationContext.getValidationScript(), "assert true");
Assert.assertNull(validationContext.getValidationScriptResourcePath());
}
@Test
// Verifies that validateScript(resource) reads the script content from the classpath
// resource eagerly (script text stored, resource path left null).
public void testReceiveBuilderWithValidationScriptResource() throws IOException {
final GroovyJsonMessageValidator validator = new GroovyJsonMessageValidator();
// Re-arm the resolver mock so only the stubbings below are in effect.
reset(referenceResolver);
when(referenceResolver.resolve("groovyMessageValidator")).thenReturn(validator);
when(referenceResolver.resolve(TestActionListeners.class)).thenReturn(new TestActionListeners());
when(referenceResolver.resolveAll(SequenceBeforeTest.class)).thenReturn(new HashMap<>());
when(referenceResolver.resolveAll(SequenceAfterTest.class)).thenReturn(new HashMap<>());
context.setReferenceResolver(referenceResolver);
MockTestDesigner builder = new MockTestDesigner(context) {
@Override
public void configure() {
receive(messageEndpoint)
.messageType(MessageType.JSON)
.validateScript(new ClassPathResource("com/consol/citrus/dsl/runner/validation.groovy"))
.validator("groovyMessageValidator");
}
};
builder.configure();
TestCase test = builder.getTestCase();
Assert.assertEquals(test.getActionCount(), 1);
Assert.assertEquals(test.getActions().get(0).getClass(), ReceiveMessageAction.class);
ReceiveMessageAction action = (ReceiveMessageAction) test.getActions().get(0);
Assert.assertEquals(action.getName(), "receive");
Assert.assertEquals(action.getEndpoint(), messageEndpoint);
Assert.assertEquals(action.getMessageType(), MessageType.JSON.name());
Assert.assertEquals(action.getValidators().size(), 1L);
Assert.assertEquals(action.getValidators().get(0), validator);
Assert.assertEquals(action.getValidationContexts().size(), 3L);
Assert.assertTrue(action.getValidationContexts().stream().anyMatch(HeaderValidationContext.class::isInstance));
Assert.assertTrue(action.getValidationContexts().stream().anyMatch(JsonMessageValidationContext.class::isInstance));
Assert.assertTrue(action.getValidationContexts().stream().anyMatch(ScriptValidationContext.class::isInstance));
ScriptValidationContext validationContext = action.getValidationContexts().stream()
.filter(ScriptValidationContext.class::isInstance).findFirst()
.map(ScriptValidationContext.class::cast)
.orElseThrow(() -> new AssertionError("Missing validation context"));
Assert.assertEquals(validationContext.getScriptType(), ScriptTypes.GROOVY);
// Resource content is loaded into the script string; trim() tolerates a trailing newline.
Assert.assertEquals(validationContext.getValidationScript().trim(), "assert json.message == 'Hello Citrus!'");
Assert.assertNull(validationContext.getValidationScriptResourcePath());
}
@Test
// Verifies that validateScriptResource(path) stores only the resource path for lazy
// loading: the inline script stays empty and the path is kept verbatim.
public void testReceiveBuilderWithValidationScriptResourcePath() throws IOException {
final GroovyJsonMessageValidator validator = new GroovyJsonMessageValidator();
// Re-arm the resolver mock so only the stubbings below are in effect.
reset(referenceResolver);
when(referenceResolver.resolve("groovyMessageValidator")).thenReturn(validator);
when(referenceResolver.resolve(TestActionListeners.class)).thenReturn(new TestActionListeners());
when(referenceResolver.resolveAll(SequenceBeforeTest.class)).thenReturn(new HashMap<>());
when(referenceResolver.resolveAll(SequenceAfterTest.class)).thenReturn(new HashMap<>());
context.setReferenceResolver(referenceResolver);
MockTestDesigner builder = new MockTestDesigner(context) {
@Override
public void configure() {
receive(messageEndpoint)
.messageType(MessageType.JSON)
.validateScriptResource("/path/to/file/File.groovy")
.validator("groovyMessageValidator");
}
};
builder.configure();
TestCase test = builder.getTestCase();
Assert.assertEquals(test.getActionCount(), 1);
Assert.assertEquals(test.getActions().get(0).getClass(), ReceiveMessageAction.class);
ReceiveMessageAction action = (ReceiveMessageAction) test.getActions().get(0);
Assert.assertEquals(action.getName(), "receive");
Assert.assertEquals(action.getEndpoint(), messageEndpoint);
Assert.assertEquals(action.getMessageType(), MessageType.JSON.name());
Assert.assertEquals(action.getValidators().size(), 1L);
Assert.assertEquals(action.getValidators().get(0), validator);
Assert.assertEquals(action.getValidationContexts().size(), 3L);
Assert.assertTrue(action.getValidationContexts().stream().anyMatch(HeaderValidationContext.class::isInstance));
Assert.assertTrue(action.getValidationContexts().stream().anyMatch(JsonMessageValidationContext.class::isInstance));
Assert.assertTrue(action.getValidationContexts().stream().anyMatch(ScriptValidationContext.class::isInstance));
ScriptValidationContext validationContext = action.getValidationContexts().stream()
.filter(ScriptValidationContext.class::isInstance).findFirst()
.map(ScriptValidationContext.class::cast)
.orElseThrow(() -> new AssertionError("Missing validation context"));
Assert.assertEquals(validationContext.getScriptType(), ScriptTypes.GROOVY);
// Path variant: script body empty, path kept for deferred resolution.
Assert.assertEquals(validationContext.getValidationScript(), "");
Assert.assertEquals(validationContext.getValidationScriptResourcePath(), "/path/to/file/File.groovy");
}
@Test
// Verifies that an inline validation script can be combined with header expectations:
// the Groovy script context is added and the header still lands in the message builder.
public void testReceiveBuilderWithValidationScriptAndHeader() {
final GroovyJsonMessageValidator validator = new GroovyJsonMessageValidator();
// Re-arm the resolver mock so only the stubbings below are in effect.
reset(referenceResolver);
when(referenceResolver.resolve("groovyMessageValidator")).thenReturn(validator);
when(referenceResolver.resolve(TestActionListeners.class)).thenReturn(new TestActionListeners());
when(referenceResolver.resolveAll(SequenceBeforeTest.class)).thenReturn(new HashMap<>());
when(referenceResolver.resolveAll(SequenceAfterTest.class)).thenReturn(new HashMap<>());
context.setReferenceResolver(referenceResolver);
MockTestDesigner builder = new MockTestDesigner(context) {
@Override
public void configure() {
receive(messageEndpoint)
.messageType(MessageType.JSON)
.validateScript("assert true")
.validator("groovyMessageValidator")
.header("operation", "sayHello");
}
};
builder.configure();
TestCase test = builder.getTestCase();
Assert.assertEquals(test.getActionCount(), 1);
Assert.assertEquals(test.getActions().get(0).getClass(), ReceiveMessageAction.class);
ReceiveMessageAction action = (ReceiveMessageAction) test.getActions().get(0);
Assert.assertEquals(action.getName(), "receive");
Assert.assertEquals(action.getEndpoint(), messageEndpoint);
Assert.assertEquals(action.getMessageType(), MessageType.JSON.name());
Assert.assertEquals(action.getValidators().size(), 1L);
Assert.assertEquals(action.getValidators().get(0), validator);
Assert.assertEquals(action.getValidationContexts().size(), 3L);
Assert.assertTrue(action.getValidationContexts().stream().anyMatch(HeaderValidationContext.class::isInstance));
Assert.assertTrue(action.getValidationContexts().stream().anyMatch(JsonMessageValidationContext.class::isInstance));
Assert.assertTrue(action.getValidationContexts().stream().anyMatch(ScriptValidationContext.class::isInstance));
ScriptValidationContext validationContext = action.getValidationContexts().stream()
.filter(ScriptValidationContext.class::isInstance).findFirst()
.map(ScriptValidationContext.class::cast)
.orElseThrow(() -> new AssertionError("Missing validation context"));
Assert.assertEquals(validationContext.getScriptType(), ScriptTypes.GROOVY);
Assert.assertEquals(validationContext.getValidationScript(), "assert true");
Assert.assertNull(validationContext.getValidationScriptResourcePath());
// The header() call must still reach the message builder alongside the script context.
Assert.assertTrue(action.getMessageBuilder() instanceof DefaultMessageBuilder);
Assert.assertTrue(((DefaultMessageBuilder)action.getMessageBuilder()).buildMessageHeaders(context).containsKey("operation"));
}
@Test
// Verifies that validateNamespace(prefix, uri) records the control namespace on the
// XML validation context of the receive action.
public void testReceiveBuilderWithNamespaceValidation() {
MockTestDesigner builder = new MockTestDesigner(context) {
@Override
public void configure() {
receive(messageEndpoint)
.payload("<TestRequest xmlns:pfx=\"http://www.consol.de/schemas/test\"><Message>Hello World!</Message></TestRequest>")
.validateNamespace("pfx", "http://www.consol.de/schemas/test");
}
};
builder.configure();
TestCase test = builder.getTestCase();
Assert.assertEquals(test.getActionCount(), 1);
Assert.assertEquals(test.getActions().get(0).getClass(), ReceiveMessageAction.class);
ReceiveMessageAction action = (ReceiveMessageAction) test.getActions().get(0);
Assert.assertEquals(action.getName(), "receive");
Assert.assertEquals(action.getMessageType(), MessageType.XML.name());
Assert.assertEquals(action.getEndpoint(), messageEndpoint);
// Only header + XML contexts here (no JSON context for this scenario).
Assert.assertEquals(action.getValidationContexts().size(), 2);
Assert.assertTrue(action.getValidationContexts().stream().anyMatch(HeaderValidationContext.class::isInstance));
Assert.assertTrue(action.getValidationContexts().stream().anyMatch(XmlMessageValidationContext.class::isInstance));
XmlMessageValidationContext validationContext = action.getValidationContexts().stream()
.filter(XmlMessageValidationContext.class::isInstance).findFirst()
.map(XmlMessageValidationContext.class::cast)
.orElseThrow(() -> new AssertionError("Missing validation context"));
Assert.assertTrue(action.getMessageBuilder() instanceof DefaultMessageBuilder);
Assert.assertEquals(((DefaultMessageBuilder)action.getMessageBuilder()).buildMessagePayload(context, action.getMessageType()),
"<TestRequest xmlns:pfx=\"http://www.consol.de/schemas/test\"><Message>Hello World!</Message></TestRequest>");
// The declared prefix/URI pair must be registered as a control namespace.
Assert.assertEquals(validationContext.getControlNamespaces().get("pfx"), "http://www.consol.de/schemas/test");
}
@Test
public void testReceiveBuilderWithXpathExpressions() {
    // Designer under test: one receive action with two XPath validation expressions.
    MockTestDesigner designer = new MockTestDesigner(context) {
        @Override
        public void configure() {
            receive(messageEndpoint)
                    .payload("<TestRequest><Message lang=\"ENG\">Hello World!</Message></TestRequest>")
                    .validate("Foo.operation", "foo")
                    .validate("Foo.message", "control");
        }
    };

    designer.configure();

    TestCase testCase = designer.getTestCase();
    Assert.assertEquals(testCase.getActionCount(), 1);
    Assert.assertEquals(testCase.getActions().get(0).getClass(), ReceiveMessageAction.class);

    ReceiveMessageAction receiveAction = (ReceiveMessageAction) testCase.getActions().get(0);
    Assert.assertEquals(receiveAction.getName(), "receive");
    Assert.assertEquals(receiveAction.getMessageType(), MessageType.XML.name());
    Assert.assertEquals(receiveAction.getEndpoint(), messageEndpoint);
    Assert.assertEquals(receiveAction.getValidationContexts().size(), 2);
    Assert.assertTrue(receiveAction.getValidationContexts().stream().anyMatch(HeaderValidationContext.class::isInstance));
    Assert.assertTrue(receiveAction.getValidationContexts().stream().anyMatch(XpathMessageValidationContext.class::isInstance));

    XpathMessageValidationContext xpathContext = receiveAction.getValidationContexts().stream()
            .filter(XpathMessageValidationContext.class::isInstance)
            .map(XpathMessageValidationContext.class::cast)
            .findFirst()
            .orElseThrow(() -> new AssertionError("Missing validation context"));

    Assert.assertTrue(receiveAction.getMessageBuilder() instanceof DefaultMessageBuilder);
    Assert.assertEquals(xpathContext.getXpathExpressions().size(), 2L);
    Assert.assertEquals(xpathContext.getXpathExpressions().get("Foo.operation"), "foo");
    Assert.assertEquals(xpathContext.getXpathExpressions().get("Foo.message"), "control");
}
@Test
public void testReceiveBuilderWithJsonPathExpressions() {
    // Designer under test: JSON receive action with five JsonPath validation expressions,
    // one of them a Hamcrest matcher.
    MockTestDesigner designer = new MockTestDesigner(context) {
        @Override
        public void configure() {
            receive(messageEndpoint)
                    .messageType(MessageType.JSON)
                    .payload("{\"text\":\"Hello World!\", \"person\":{\"name\":\"John\",\"surname\":\"Doe\",\"active\": true}, \"index\":5, \"id\":\"x123456789x\"}")
                    .validate("$.person.name", "foo")
                    .validate("$.person.active", true)
                    .validate("$.id", containsString("123456789"))
                    .validate("$.text", "Hello World!")
                    .validate("$.index", 5);
        }
    };

    designer.configure();

    TestCase testCase = designer.getTestCase();
    Assert.assertEquals(testCase.getActionCount(), 1);
    Assert.assertEquals(testCase.getActions().get(0).getClass(), ReceiveMessageAction.class);

    ReceiveMessageAction receiveAction = (ReceiveMessageAction) testCase.getActions().get(0);
    Assert.assertEquals(receiveAction.getName(), "receive");
    Assert.assertEquals(receiveAction.getMessageType(), MessageType.JSON.name());
    Assert.assertEquals(receiveAction.getEndpoint(), messageEndpoint);
    Assert.assertEquals(receiveAction.getValidationContexts().size(), 3);
    Assert.assertTrue(receiveAction.getValidationContexts().stream().anyMatch(HeaderValidationContext.class::isInstance));
    Assert.assertTrue(receiveAction.getValidationContexts().stream().anyMatch(JsonMessageValidationContext.class::isInstance));
    Assert.assertTrue(receiveAction.getValidationContexts().stream().anyMatch(JsonPathMessageValidationContext.class::isInstance));

    JsonPathMessageValidationContext jsonPathContext = receiveAction.getValidationContexts().stream()
            .filter(JsonPathMessageValidationContext.class::isInstance)
            .map(JsonPathMessageValidationContext.class::cast)
            .findFirst()
            .orElseThrow(() -> new AssertionError("Missing validation context"));

    Assert.assertTrue(receiveAction.getMessageBuilder() instanceof DefaultMessageBuilder);
    Assert.assertEquals(jsonPathContext.getJsonPathExpressions().size(), 5L);
    Assert.assertEquals(jsonPathContext.getJsonPathExpressions().get("$.person.name"), "foo");
    Assert.assertEquals(jsonPathContext.getJsonPathExpressions().get("$.person.active"), true);
    Assert.assertEquals(jsonPathContext.getJsonPathExpressions().get("$.text"), "Hello World!");
    Assert.assertEquals(jsonPathContext.getJsonPathExpressions().get("$.index"), 5);
    Assert.assertEquals(jsonPathContext.getJsonPathExpressions().get("$.id").getClass(), StringContains.class);
}
@Test
public void testReceiveBuilderWithJsonPathExpressionsInvalidMessageType() {
    // NOTE(review): this test pairs a JsonPath validation expression with an XML message
    // type, yet declares no expected exception and makes no assertions after configure().
    // Presumably the type mismatch is only rejected at execution time — confirm whether
    // this should be @Test(expectedExceptions = ...) or carry explicit assertions;
    // as written it can never fail.
    MockTestDesigner builder = new MockTestDesigner(context) {
        @Override
        public void configure() {
            receive(messageEndpoint)
                    .messageType(MessageType.XML)
                    .payload("{\"text\":\"Hello World!\"}")
                    .validate("$.text", "Hello World!");
        }
    };

    builder.configure();
}
@Test
public void testReceiveBuilderWithIgnoreElementsXpath() {
    // Designer under test: XML receive action with a single ignored element path.
    MockTestDesigner designer = new MockTestDesigner(context) {
        @Override
        public void configure() {
            receive(messageEndpoint)
                    .payload("<TestRequest><Message>?</Message></TestRequest>")
                    .ignore("TestRequest.Message");
        }
    };

    designer.configure();

    TestCase testCase = designer.getTestCase();
    Assert.assertEquals(testCase.getActionCount(), 1);
    Assert.assertEquals(testCase.getActions().get(0).getClass(), ReceiveMessageAction.class);

    ReceiveMessageAction receiveAction = (ReceiveMessageAction) testCase.getActions().get(0);
    Assert.assertEquals(receiveAction.getName(), "receive");
    Assert.assertEquals(receiveAction.getMessageType(), MessageType.XML.name());
    Assert.assertEquals(receiveAction.getEndpoint(), messageEndpoint);
    Assert.assertEquals(receiveAction.getValidationContexts().size(), 2);
    Assert.assertTrue(receiveAction.getValidationContexts().stream().anyMatch(HeaderValidationContext.class::isInstance));
    Assert.assertTrue(receiveAction.getValidationContexts().stream().anyMatch(XmlMessageValidationContext.class::isInstance));

    XmlMessageValidationContext xmlContext = receiveAction.getValidationContexts().stream()
            .filter(XmlMessageValidationContext.class::isInstance)
            .map(XmlMessageValidationContext.class::cast)
            .findFirst()
            .orElseThrow(() -> new AssertionError("Missing validation context"));

    Assert.assertTrue(receiveAction.getMessageBuilder() instanceof DefaultMessageBuilder);
    Assert.assertEquals(((DefaultMessageBuilder) receiveAction.getMessageBuilder()).buildMessagePayload(context, receiveAction.getMessageType()), "<TestRequest><Message>?</Message></TestRequest>");
    Assert.assertEquals(xmlContext.getIgnoreExpressions().size(), 1L);
    Assert.assertEquals(xmlContext.getIgnoreExpressions().iterator().next(), "TestRequest.Message");
}
@Test
public void testReceiveBuilderWithIgnoreElementsJsonPath() {
    // Designer under test: JSON receive action with two ignored JsonPath expressions.
    MockTestDesigner designer = new MockTestDesigner(context) {
        @Override
        public void configure() {
            receive(messageEndpoint)
                    .messageType(MessageType.JSON)
                    .payload("{\"text\":\"Hello World!\", \"person\": {\"name\": \"Penny\", age: 25}}")
                    .ignore("$..text")
                    .ignore("$.person.age");
        }
    };

    designer.configure();

    TestCase testCase = designer.getTestCase();
    Assert.assertEquals(testCase.getActionCount(), 1);
    Assert.assertEquals(testCase.getActions().get(0).getClass(), ReceiveMessageAction.class);

    ReceiveMessageAction receiveAction = (ReceiveMessageAction) testCase.getActions().get(0);
    Assert.assertEquals(receiveAction.getName(), "receive");
    Assert.assertEquals(receiveAction.getMessageType(), MessageType.JSON.name());
    Assert.assertEquals(receiveAction.getEndpoint(), messageEndpoint);
    Assert.assertEquals(receiveAction.getValidationContexts().size(), 2);
    Assert.assertTrue(receiveAction.getValidationContexts().stream().anyMatch(HeaderValidationContext.class::isInstance));
    Assert.assertTrue(receiveAction.getValidationContexts().stream().anyMatch(JsonMessageValidationContext.class::isInstance));

    JsonMessageValidationContext jsonContext = receiveAction.getValidationContexts().stream()
            .filter(JsonMessageValidationContext.class::isInstance)
            .map(JsonMessageValidationContext.class::cast)
            .findFirst()
            .orElseThrow(() -> new AssertionError("Missing validation context"));

    Assert.assertTrue(receiveAction.getMessageBuilder() instanceof DefaultMessageBuilder);
    Assert.assertEquals(((DefaultMessageBuilder) receiveAction.getMessageBuilder()).buildMessagePayload(context, receiveAction.getMessageType()), "{\"text\":\"Hello World!\", \"person\": {\"name\": \"Penny\", age: 25}}");
    Assert.assertEquals(jsonContext.getIgnoreExpressions().size(), 2L);
    Assert.assertTrue(jsonContext.getIgnoreExpressions().contains("$..text"));
    Assert.assertTrue(jsonContext.getIgnoreExpressions().contains("$.person.age"));
}
@Test
public void testReceiveBuilderWithSchema() {
    // Designer under test: XML receive action validated against an explicit XSD schema.
    MockTestDesigner designer = new MockTestDesigner(context) {
        @Override
        public void configure() {
            receive(messageEndpoint)
                    .payload("<TestRequest><Message>?</Message></TestRequest>")
                    .xsd("testSchema");
        }
    };

    designer.configure();

    TestCase testCase = designer.getTestCase();
    Assert.assertEquals(testCase.getActionCount(), 1);
    Assert.assertEquals(testCase.getActions().get(0).getClass(), ReceiveMessageAction.class);

    ReceiveMessageAction receiveAction = (ReceiveMessageAction) testCase.getActions().get(0);
    Assert.assertEquals(receiveAction.getName(), "receive");
    Assert.assertEquals(receiveAction.getMessageType(), MessageType.XML.name());
    Assert.assertEquals(receiveAction.getEndpoint(), messageEndpoint);
    Assert.assertEquals(receiveAction.getValidationContexts().size(), 2);
    Assert.assertTrue(receiveAction.getValidationContexts().stream().anyMatch(HeaderValidationContext.class::isInstance));
    Assert.assertTrue(receiveAction.getValidationContexts().stream().anyMatch(XmlMessageValidationContext.class::isInstance));

    XmlMessageValidationContext xmlContext = receiveAction.getValidationContexts().stream()
            .filter(XmlMessageValidationContext.class::isInstance)
            .map(XmlMessageValidationContext.class::cast)
            .findFirst()
            .orElseThrow(() -> new AssertionError("Missing validation context"));

    Assert.assertTrue(receiveAction.getMessageBuilder() instanceof DefaultMessageBuilder);
    Assert.assertEquals(((DefaultMessageBuilder) receiveAction.getMessageBuilder()).buildMessagePayload(context, receiveAction.getMessageType()), "<TestRequest><Message>?</Message></TestRequest>");
    Assert.assertEquals(xmlContext.getSchema(), "testSchema");
}
@Test
public void testReceiveBuilderWithSchemaRepository() {
    // Designer under test: XML receive action validated against a named schema repository.
    MockTestDesigner designer = new MockTestDesigner(context) {
        @Override
        public void configure() {
            receive(messageEndpoint)
                    .payload("<TestRequest><Message>?</Message></TestRequest>")
                    .xsdSchemaRepository("testSchemaRepository");
        }
    };

    designer.configure();

    TestCase testCase = designer.getTestCase();
    Assert.assertEquals(testCase.getActionCount(), 1);
    Assert.assertEquals(testCase.getActions().get(0).getClass(), ReceiveMessageAction.class);

    ReceiveMessageAction receiveAction = (ReceiveMessageAction) testCase.getActions().get(0);
    Assert.assertEquals(receiveAction.getName(), "receive");
    Assert.assertEquals(receiveAction.getMessageType(), MessageType.XML.name());
    Assert.assertEquals(receiveAction.getEndpoint(), messageEndpoint);
    Assert.assertEquals(receiveAction.getValidationContexts().size(), 2);
    Assert.assertTrue(receiveAction.getValidationContexts().stream().anyMatch(HeaderValidationContext.class::isInstance));
    Assert.assertTrue(receiveAction.getValidationContexts().stream().anyMatch(XmlMessageValidationContext.class::isInstance));

    XmlMessageValidationContext xmlContext = receiveAction.getValidationContexts().stream()
            .filter(XmlMessageValidationContext.class::isInstance)
            .map(XmlMessageValidationContext.class::cast)
            .findFirst()
            .orElseThrow(() -> new AssertionError("Missing validation context"));

    Assert.assertTrue(receiveAction.getMessageBuilder() instanceof DefaultMessageBuilder);
    Assert.assertEquals(((DefaultMessageBuilder) receiveAction.getMessageBuilder()).buildMessagePayload(context, receiveAction.getMessageType()), "<TestRequest><Message>?</Message></TestRequest>");
    Assert.assertEquals(xmlContext.getSchemaRepository(), "testSchemaRepository");
}
}
| |
package com.mapswithme.maps.ugc.routes;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.core.app.ActivityCompat;
import androidx.recyclerview.widget.DividerItemDecoration;
import androidx.recyclerview.widget.LinearLayoutManager;
import androidx.recyclerview.widget.RecyclerView;
import android.util.Pair;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import com.mapswithme.maps.R;
import com.mapswithme.maps.adapter.AdapterPositionConverter;
import com.mapswithme.maps.adapter.OnItemClickListener;
import com.mapswithme.maps.adapter.RecyclerCompositeAdapter;
import com.mapswithme.maps.adapter.RepeatablePairPositionConverter;
import com.mapswithme.maps.adapter.TagGroupNameAdapter;
import com.mapswithme.maps.adapter.TagsAdapter;
import com.mapswithme.maps.adapter.TagsCompositeAdapter;
import com.mapswithme.maps.base.BaseMwmFragment;
import com.mapswithme.maps.bookmarks.data.BookmarkManager;
import com.mapswithme.maps.bookmarks.data.CatalogCustomProperty;
import com.mapswithme.maps.bookmarks.data.CatalogTag;
import com.mapswithme.maps.bookmarks.data.CatalogTagsGroup;
import com.mapswithme.maps.dialog.AlertDialog;
import com.mapswithme.maps.dialog.AlertDialogCallback;
import com.mapswithme.maps.widget.recycler.ItemDecoratorFactory;
import com.mapswithme.util.UiUtils;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
/**
 * Fragment that lets the user pick catalog tags for a UGC route.
 * <p>
 * Tags are loaded asynchronously via {@link BookmarkManager#requestRouteTags()}; results arrive
 * through {@link BookmarkManager.BookmarksCatalogListener#onTagsReceived}. The current selection
 * survives configuration changes via the saved-instance-state bundle, and the chosen tags are
 * returned to the caller as an activity result when the "done" menu item is pressed.
 * <p>
 * Fix vs. previous revision: callbacks that reach the host activity/context now consistently use
 * {@code requireActivity()}/{@code requireContext()} (previously a mix of unchecked
 * {@code getActivity()}/{@code getContext()} and {@code requireActivity()}), and the deprecated
 * {@code ActivityCompat.invalidateOptionsMenu} call was replaced.
 */
public class UgcRouteTagsFragment extends BaseMwmFragment implements BookmarkManager.BookmarksCatalogListener,
                                                                     OnItemClickListener<Pair<TagsAdapter, TagsAdapter.TagViewHolder>>,
                                                                     AlertDialogCallback
{
  private static final String BUNDLE_SELECTED_TAGS = "bundle_saved_tags";
  private static final String ERROR_LOADING_DIALOG_TAG = "error_loading_dialog";
  private static final int ERROR_LOADING_DIALOG_REQ_CODE = 205;

  @SuppressWarnings("NullableProblems")
  @NonNull
  private RecyclerView mRecycler;
  @SuppressWarnings("NullableProblems")
  @NonNull
  private View mProgress;
  @SuppressWarnings("NullableProblems")
  @NonNull
  private ViewGroup mTagsContainer;
  // Saved state is kept until tags arrive so the previous selection can be restored lazily.
  @Nullable
  private Bundle mSavedInstanceState;
  // Null until the first successful onTagsReceived() callback.
  @Nullable
  private TagsCompositeAdapter mTagsAdapter;
  @SuppressWarnings("NullableProblems")
  @NonNull
  private TextView mDescriptionView;

  @Nullable
  @Override
  public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container,
                           @Nullable Bundle savedInstanceState)
  {
    ViewGroup root = (ViewGroup) inflater.inflate(R.layout.fragment_ugc_routes, container, false);
    setHasOptionsMenu(true);
    mProgress = root.findViewById(R.id.progress_container);
    mTagsContainer = root.findViewById(R.id.tags_container);
    mDescriptionView = root.findViewById(R.id.ugc_route_tags_desc);
    initRecycler(root);
    // Show the spinner while the tag catalog is being fetched.
    UiUtils.hide(mTagsContainer);
    UiUtils.show(mProgress);
    BookmarkManager.INSTANCE.requestRouteTags();
    mSavedInstanceState = savedInstanceState;
    return root;
  }

  private void initRecycler(@NonNull ViewGroup root)
  {
    mRecycler = root.findViewById(R.id.recycler);
    // Disable item animations: the whole list is rebound on every click (see onItemClick).
    mRecycler.setItemAnimator(null);
    RecyclerView.ItemDecoration decor = ItemDecoratorFactory.createRatingRecordDecorator(
        requireContext().getApplicationContext(),
        DividerItemDecoration.VERTICAL, R.drawable.divider_transparent_half_plus_eight);
    mRecycler.addItemDecoration(decor);
  }

  private void onRetryClicked()
  {
    UiUtils.hide(mTagsContainer);
    UiUtils.show(mProgress);
    BookmarkManager.INSTANCE.requestRouteTags();
  }

  private void showErrorLoadingDialog()
  {
    AlertDialog dialog = new AlertDialog.Builder()
                                        .setTitleId(R.string.title_error_downloading_bookmarks)
                                        .setMessageId(R.string.tags_loading_error_subtitle)
                                        .setPositiveBtnId(R.string.try_again)
                                        .setNegativeBtnId(R.string.cancel)
                                        .setReqCode(ERROR_LOADING_DIALOG_REQ_CODE)
                                        .setFragManagerStrategyType(AlertDialog.FragManagerStrategyType.ACTIVITY_FRAGMENT_MANAGER)
                                        .build();
    dialog.setTargetFragment(this, ERROR_LOADING_DIALOG_REQ_CODE);
    dialog.show(this, ERROR_LOADING_DIALOG_TAG);
  }

  @Override
  public void onCreateOptionsMenu(Menu menu, MenuInflater inflater)
  {
    inflater.inflate(R.menu.menu_done, menu);
  }

  @Override
  public void onPrepareOptionsMenu(Menu menu)
  {
    super.onPrepareOptionsMenu(menu);
    // "Done" is only offered once at least one tag is selected.
    MenuItem item = menu.findItem(R.id.done);
    item.setVisible(hasSelectedItems());
  }

  private boolean hasSelectedItems()
  {
    return mTagsAdapter != null && mTagsAdapter.hasSelectedItems();
  }

  @Override
  public boolean onOptionsItemSelected(MenuItem item)
  {
    if (item.getItemId() == R.id.done)
    {
      onDoneOptionItemClicked();
      return true;
    }
    return super.onOptionsItemSelected(item);
  }

  /** Delivers the selected tags to the caller via RESULT_OK and closes the activity. */
  private void onDoneOptionItemClicked()
  {
    if (mTagsAdapter == null)
      return;

    ArrayList<CatalogTag> value = new ArrayList<>(mTagsAdapter.getSelectedTags());
    Intent result = new Intent().putParcelableArrayListExtra(UgcRouteTagsActivity.EXTRA_TAGS, value);
    requireActivity().setResult(Activity.RESULT_OK, result);
    requireActivity().finish();
  }

  @Override
  public void onSaveInstanceState(Bundle outState)
  {
    super.onSaveInstanceState(outState);
    if (mTagsAdapter != null)
      outState.putParcelableArrayList(BUNDLE_SELECTED_TAGS, new ArrayList<>(mTagsAdapter.getSelectedTags()));
  }

  @Override
  public void onStart()
  {
    super.onStart();
    BookmarkManager.INSTANCE.addCatalogListener(this);
  }

  @Override
  public void onStop()
  {
    super.onStop();
    BookmarkManager.INSTANCE.removeCatalogListener(this);
  }

  @Override
  public void onImportStarted(@NonNull String serverId)
  {
    /* Do nothing by default */
  }

  @Override
  public void onImportFinished(@NonNull String serverId, long catId, boolean successful)
  {
    /* Do nothing by default */
  }

  @Override
  public void onTagsReceived(boolean successful, @NonNull List<CatalogTagsGroup> tagsGroups,
                             int tagsLimit)
  {
    UiUtils.showIf(successful && tagsGroups.size() != 0, mTagsContainer);
    UiUtils.hide(mProgress);

    if (tagsGroups.size() == 0 || !successful)
    {
      showErrorLoadingDialog();
      return;
    }
    installTags(tagsGroups, tagsLimit);
  }

  @Override
  public void onCustomPropertiesReceived(boolean successful,
                                         @NonNull List<CatalogCustomProperty> properties)
  {
    /* Not ready yet */
  }

  /** Binds the loaded tag groups to the recycler and restores any previously saved selection. */
  private void installTags(@NonNull List<CatalogTagsGroup> tagsGroups, int tagsLimit)
  {
    List<CatalogTag> savedStateTags = validateSavedState(mSavedInstanceState);
    TagGroupNameAdapter categoryAdapter = new TagGroupNameAdapter(tagsGroups);
    mTagsAdapter = new TagsCompositeAdapter(requireContext(), tagsGroups, savedStateTags, this,
                                            tagsLimit);
    RecyclerCompositeAdapter compositeAdapter = makeCompositeAdapter(categoryAdapter, mTagsAdapter);
    LinearLayoutManager layoutManager = new LinearLayoutManager(requireContext(),
                                                                LinearLayoutManager.VERTICAL,
                                                                false);
    mRecycler.setLayoutManager(layoutManager);
    mRecycler.setAdapter(compositeAdapter);
    String description = getString(R.string.ugc_route_tags_desc, String.valueOf(tagsLimit));
    mDescriptionView.setText(description);
    requireActivity().invalidateOptionsMenu();
  }

  @NonNull
  private static List<CatalogTag> validateSavedState(@Nullable Bundle savedState)
  {
    List<CatalogTag> tags;
    if (savedState == null || (tags = savedState.getParcelableArrayList(BUNDLE_SELECTED_TAGS)) == null)
      return Collections.emptyList();

    return tags;
  }

  @NonNull
  private static RecyclerCompositeAdapter makeCompositeAdapter(@NonNull TagGroupNameAdapter categoryAdapter,
                                                               @NonNull TagsCompositeAdapter tagsCompositeAdapter)
  {
    AdapterPositionConverter converter = new RepeatablePairPositionConverter(categoryAdapter,
                                                                             tagsCompositeAdapter);
    return new RecyclerCompositeAdapter(converter, categoryAdapter, tagsCompositeAdapter);
  }

  @Override
  public void onUploadStarted(long originCategoryId)
  {
    /* Do nothing by default */
  }

  @Override
  public void onUploadFinished(@NonNull BookmarkManager.UploadResult uploadResult, @NonNull String description,
                               long originCategoryId, long resultCategoryId)
  {
    /* Do nothing by default */
  }

  @Override
  public void onItemClick(@NonNull View v,
                          @NonNull Pair<TagsAdapter, TagsAdapter.TagViewHolder> item)
  {
    // Selection affects the "done" menu item visibility and every group's enabled state
    // (the shared tag limit), so refresh the menu and rebind all child adapters.
    requireActivity().invalidateOptionsMenu();
    Objects.requireNonNull(mTagsAdapter);
    for (int i = 0; i < mTagsAdapter.getItemCount(); i++)
    {
      mTagsAdapter.getItem(i).notifyDataSetChanged();
    }
  }

  @Override
  public void onAlertDialogPositiveClick(int requestCode, int which)
  {
    onRetryClicked();
  }

  @Override
  public void onAlertDialogNegativeClick(int requestCode, int which)
  {
    cancelAndFinish();
  }

  @Override
  public void onAlertDialogCancel(int requestCode)
  {
    cancelAndFinish();
  }

  /** Reports RESULT_CANCELED to the caller and closes the host activity. */
  private void cancelAndFinish()
  {
    requireActivity().setResult(Activity.RESULT_CANCELED);
    requireActivity().finish();
  }
}
| |
package org.codehaus.groovy.grails.web.json;
/*
Copyright (c) 2002 JSON.org
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
The Software shall be used for Good, not Evil.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
import org.apache.commons.lang.UnhandledException;
import java.io.IOException;
import java.io.Writer;
import java.util.*;
/**
* A JSONArray is an ordered sequence of values. Its external text form is a
* string wrapped in square brackets with commas separating the values. The
* internal form is an object having <code>get</code> and <code>opt</code>
* methods for accessing the values by index, and <code>put</code> methods for
* adding or replacing values. The values can be any of these types:
* <code>Boolean</code>, <code>JSONArray</code>, <code>JSONObject</code>,
* <code>Number</code>, <code>String</code>, or the
* <code>JSONObject.NULL object</code>.
* <p/>
* The constructor can convert a JSON text into a Java object. The
* <code>toString</code> method converts to JSON text.
* <p/>
* A <code>get</code> method returns a value if one can be found, and throws an
* exception if one cannot be found. An <code>opt</code> method returns a
* default value instead of throwing an exception, and so is useful for
* obtaining optional values.
* <p/>
* The generic <code>get()</code> and <code>opt()</code> methods return an
* object which you can cast or query for type. There are also typed
* <code>get</code> and <code>opt</code> methods that do type checking and type
 * coercion for you.
* <p/>
* The texts produced by the <code>toString</code> methods strictly conform to
* JSON syntax rules. The constructors are more forgiving in the texts they will
* accept:
* <ul>
* <li>An extra <code>,</code> <small>(comma)</small> may appear just
* before the closing bracket.</li>
* <li>The <code>null</code> value will be inserted when there
* is <code>,</code> <small>(comma)</small> elision.</li>
* <li>Strings may be quoted with <code>'</code> <small>(single
* quote)</small>.</li>
* <li>Strings do not need to be quoted at all if they do not begin with a quote
* or single quote, and if they do not contain leading or trailing spaces,
* and if they do not contain any of these characters:
* <code>{ } [ ] / \ : , = ; #</code> and if they do not look like numbers
* and if they are not the reserved words <code>true</code>,
* <code>false</code>, or <code>null</code>.</li>
* <li>Values can be separated by <code>;</code> <small>(semicolon)</small> as
* well as by <code>,</code> <small>(comma)</small>.</li>
* <li>Numbers may have the <code>0-</code> <small>(octal)</small> or
* <code>0x-</code> <small>(hex)</small> prefix.</li>
 * <li>Comments written in the slash-slash, slash-star, and hash conventions
* will be ignored.</li>
* </ul>
*
* @author JSON.org
* @version 2
*/
@SuppressWarnings({ "unchecked", "rawtypes" })
public class JSONArray implements JSONElement, List {
/**
 * The arrayList where the JSONArray's properties are kept.
 */
// Backing store for every element, including the explicit nulls that the tokener
// constructor records for comma elision (see JSONArray(JSONTokener)).
private ArrayList myArrayList;
/**
 * Construct an empty JSONArray with no elements.
 */
public JSONArray() {
    this.myArrayList = new ArrayList();
}
/**
 * Construct a JSONArray from a JSONTokener.
 *
 * @param x A JSONTokener
 * @throws JSONException If there is a syntax error.
 */
public JSONArray(JSONTokener x) throws JSONException {
    this();
    if (x.nextClean() != '[') {
        throw x.syntaxError("A JSONArray text must start with '['");
    }
    if (x.nextClean() == ']') {
        // Empty array: "[]".
        return;
    }
    x.back();
    for (; ;) {
        if (x.nextClean() == ',') {
            // Comma elision (e.g. "[a,,b]"): record a null element for the gap.
            x.back();
            this.myArrayList.add(null);
        } else {
            x.back();
            this.myArrayList.add(x.nextValue());
        }
        switch (x.nextClean()) {
            // ';' is accepted as a lenient alternative to ',' (see class javadoc).
            case ';':
            case ',':
                if (x.nextClean() == ']') {
                    // A trailing separator just before ']' is tolerated.
                    return;
                }
                x.back();
                break;
            case ']':
                return;
            default:
                throw x.syntaxError("Expected a ',' or ']'");
        }
    }
}
/**
 * Construct a JSONArray from a source JSON text.
 *
 * @param string A string that begins with
 *               <code>[</code> <small>(left bracket)</small>
 *               and ends with <code>]</code> <small>(right bracket)</small>.
 * @throws JSONException If there is a syntax error.
 */
public JSONArray(String string) throws JSONException {
    this(new JSONTokener(string));
}
/**
 * Construct a JSONArray from a Collection.
 * The elements are copied into the new array as-is (shallow copy).
 *
 * @param collection A Collection.
 */
public JSONArray(Collection collection) {
    this.myArrayList = new ArrayList(collection);
}
/**
 * Get the object value associated with an index.
 *
 * @param index The index must be between 0 and length() - 1.
 * @return An object value.
 * @throws JSONException If there is no value for the index.
 */
public Object get(int index) {
    Object o = opt(index);
    if (o == null) {
        // NOTE: the JSONException is wrapped in an unchecked UnhandledException so this
        // method needs no "throws" clause; callers catching JSONException directly will
        // not intercept failures from here.
        throw new UnhandledException(new JSONException("JSONArray[" + index + "] not found."));
    }
    return o;
}
// --- java.util.List plumbing: straight delegation to the backing ArrayList. ---

/** Replaces the element at {@code i}, returning the previous value ({@link List#set}). */
public Object set(int i, Object o) {
    return myArrayList.set(i, o);
}

/** Appends {@code o} to the end of this array ({@link List#add}). */
public boolean add(Object o) {
    return myArrayList.add(o);
}

/** Inserts {@code o} at position {@code i}, shifting later elements right. */
public void add(int i, Object o) {
    myArrayList.add(i, o);
}

/** Removes and returns the element at position {@code i}. */
public Object remove(int i) {
    return myArrayList.remove(i);
}

/** Removes the first occurrence of {@code o}; returns true if the array changed. */
public boolean remove(Object o) {
    return myArrayList.remove(o);
}

/** Removes all elements. */
public void clear() {
    myArrayList.clear();
}

/** Appends every element of {@code collection}; returns true if the array changed. */
public boolean addAll(Collection collection) {
    return myArrayList.addAll(collection);
}

/** Inserts every element of {@code collection} starting at position {@code i}. */
public boolean addAll(int i, Collection collection) {
    return myArrayList.addAll(i, collection);
}

/** Returns an iterator over the elements in order ({@link List#iterator}). */
public Iterator iterator() {
    return myArrayList.iterator();
}

/** Returns a list iterator over the elements ({@link List#listIterator}). */
public ListIterator listIterator() {
    return myArrayList.listIterator();
}

/** Returns a list iterator starting at position {@code i}. */
public ListIterator listIterator(int i) {
    return myArrayList.listIterator(i);
}

/** Returns a view of the range [{@code i}, {@code i1}) backed by this array. */
public List subList(int i, int i1) {
    return myArrayList.subList(i, i1);
}

/** Returns true if this array contains every element of {@code collection}. */
public boolean containsAll(Collection collection) {
    return myArrayList.containsAll(collection);
}

/** Removes all elements contained in {@code collection}; returns true if changed. */
public boolean removeAll(Collection collection) {
    return myArrayList.removeAll(collection);
}

/** Retains only the elements contained in {@code collection}; returns true if changed. */
public boolean retainAll(Collection collection) {
    return myArrayList.retainAll(collection);
}
/**
 * Get the boolean value associated with an index.
 * The string values "true" and "false" are converted to boolean.
 *
 * @param index The index must be between 0 and length() - 1.
 * @return The truth.
 * @throws JSONException If there is no value for the index or if the
 *                       value is not convertable to boolean.
 */
public boolean getBoolean(int index) throws JSONException {
    Object value = get(index);
    boolean isString = value instanceof String;
    if (Boolean.FALSE.equals(value) || (isString && ((String) value).equalsIgnoreCase("false"))) {
        return false;
    }
    if (Boolean.TRUE.equals(value) || (isString && ((String) value).equalsIgnoreCase("true"))) {
        return true;
    }
    // Neither a Boolean nor a recognized boolean string.
    throw new JSONException("JSONArray[" + index + "] is not a Boolean.");
}
/**
 * Get the double value associated with an index.
 *
 * @param index The index must be between 0 and length() - 1.
 * @return The value.
 * @throws JSONException If the key is not found or if the value cannot
 *                       be converted to a number.
 */
public double getDouble(int index) throws JSONException {
    Object value = get(index);
    try {
        if (value instanceof Number) {
            return ((Number) value).doubleValue();
        }
        // Non-numeric values are parsed from their string form; a ClassCastException or
        // NumberFormatException here is converted into the JSONException below.
        return Double.parseDouble((String) value);
    } catch (Exception e) {
        throw new JSONException("JSONArray[" + index + "] is not a number.");
    }
}
/**
 * Get the int value associated with an index.
 *
 * @param index The index must be between 0 and length() - 1.
 * @return The value.
 * @throws JSONException If the key is not found or if the value cannot
 *                       be converted to a number.
 */
public int getInt(int index) throws JSONException {
    Object value = get(index);
    if (value instanceof Number) {
        return ((Number) value).intValue();
    }
    // Fall back to double parsing and truncate toward zero.
    return (int) getDouble(index);
}
/**
 * Get the JSONArray associated with an index.
 *
 * @param index The index must be between 0 and length() - 1.
 * @return A JSONArray value.
 * @throws JSONException If there is no value for the index, or if the
 *                       value is not a JSONArray.
 */
public JSONArray getJSONArray(int index) throws JSONException {
    Object value = get(index);
    if (!(value instanceof JSONArray)) {
        throw new JSONException("JSONArray[" + index + "] is not a JSONArray.");
    }
    return (JSONArray) value;
}
/**
 * Get the JSONObject associated with an index.
 *
 * @param index subscript
 * @return A JSONObject value.
 * @throws JSONException If there is no value for the index or if the
 *                       value is not a JSONObject.
 */
public JSONObject getJSONObject(int index) throws JSONException {
    Object value = get(index);
    if (!(value instanceof JSONObject)) {
        throw new JSONException("JSONArray[" + index + "] is not a JSONObject.");
    }
    return (JSONObject) value;
}
/**
 * Get the long value associated with an index.
 *
 * @param index The index must be between 0 and length() - 1.
 * @return The value.
 * @throws JSONException If the key is not found or if the value cannot
 *                       be converted to a number.
 */
public long getLong(int index) throws JSONException {
    Object value = get(index);
    if (value instanceof Number) {
        return ((Number) value).longValue();
    }
    // Fall back to double parsing and truncate toward zero.
    return (long) getDouble(index);
}
/**
 * Get the string associated with an index.
 * Any stored value is converted via {@code toString()}, so non-string elements
 * yield their string representation rather than throwing.
 *
 * @param index The index must be between 0 and length() - 1.
 * @return A string value.
 * @throws JSONException If there is no value for the index.
 */
public String getString(int index) throws JSONException {
    return get(index).toString();
}
/**
 * Determine if the value is null.
 * Compares against the JSONObject.NULL sentinel; an out-of-range index also
 * counts as null because opt() returns null for it.
 *
 * @param index The index must be between 0 and length() - 1.
 * @return true if the value at the index is null, or if there is no value.
 */
public boolean isNull(int index) {
    return JSONObject.NULL.equals(opt(index));
}
/**
 * Make a string from the contents of this JSONArray. The
 * <code>separator</code> string is inserted between each element.
 * Warning: This method assumes that the data structure is acyclical.
 *
 * @param separator A string that will be inserted between the elements.
 * @return a string.
 * @throws JSONException If the array contains an invalid number.
 */
public String join(String separator) throws JSONException {
    StringBuilder result = new StringBuilder();
    int size = length();
    for (int position = 0; position < size; position++) {
        if (position != 0) {
            result.append(separator);
        }
        result.append(JSONObject.valueToString(this.myArrayList.get(position)));
    }
    return result.toString();
}
/**
 * Returns the number of elements in this JSONArray, nulls included.
 *
 * @return the element count
 */
public int length() {
    return this.myArrayList.size();
}
/**
 * Returns the value at the given index without throwing.
 *
 * @param index The index must be between 0 and length() - 1.
 * @return the stored object, or null when the index is out of range
 */
public Object opt(int index) {
    if (index < 0 || index >= length()) {
        return null;
    }
    return this.myArrayList.get(index);
}
/**
 * Returns the boolean at the given index, or false when the slot is
 * missing or is not Boolean.TRUE / the String "true".
 *
 * @param index The index must be between 0 and length() - 1.
 * @return the truth value, defaulting to false
 */
public boolean optBoolean(int index) {
    return optBoolean(index, false);
}
/**
 * Returns the boolean at the given index, falling back to the supplied
 * default when the slot is missing or is not a Boolean or the String
 * "true"/"false" (case insensitive).
 *
 * @param index The index must be between 0 and length() - 1.
 * @param defaultValue value used when conversion fails
 * @return the truth value
 */
public boolean optBoolean(int index, boolean defaultValue) {
    try {
        return getBoolean(index);
    } catch (Exception ignored) {
        // Any lookup/conversion failure deliberately yields the default.
        return defaultValue;
    }
}
/**
 * Returns the double at the given index, or NaN when the slot is
 * missing or not convertible to a number.
 *
 * @param index The index must be between 0 and length() - 1.
 * @return the numeric value, defaulting to Double.NaN
 */
public double optDouble(int index) {
    return optDouble(index, Double.NaN);
}
/**
 * Returns the double at the given index, falling back to the supplied
 * default when the slot is missing or not convertible to a number.
 *
 * @param index subscript
 * @param defaultValue value used when conversion fails
 * @return the numeric value
 */
public double optDouble(int index, double defaultValue) {
    try {
        return getDouble(index);
    } catch (Exception ignored) {
        // Any lookup/conversion failure deliberately yields the default.
        return defaultValue;
    }
}
/**
 * Returns the int at the given index, or zero when the slot is missing
 * or not convertible to a number.
 *
 * @param index The index must be between 0 and length() - 1.
 * @return the numeric value, defaulting to 0
 */
public int optInt(int index) {
    return optInt(index, 0);
}
/**
 * Returns the int at the given index, falling back to the supplied
 * default when the slot is missing or not convertible to a number.
 *
 * @param index The index must be between 0 and length() - 1.
 * @param defaultValue value used when conversion fails
 * @return the numeric value
 */
public int optInt(int index, int defaultValue) {
    try {
        return getInt(index);
    } catch (Exception ignored) {
        // Any lookup/conversion failure deliberately yields the default.
        return defaultValue;
    }
}
/**
 * Returns the JSONArray at the given index, or null when the slot is
 * missing or holds some other type.
 *
 * @param index subscript
 * @return the nested JSONArray, or null
 */
public JSONArray optJSONArray(int index) {
    Object value = opt(index);
    if (value instanceof JSONArray) {
        return (JSONArray) value;
    }
    return null;
}
/**
 * Returns the JSONObject at the given index, or null when the slot is
 * missing or holds some other type.
 *
 * @param index The index must be between 0 and length() - 1.
 * @return the nested JSONObject, or null
 */
public JSONObject optJSONObject(int index) {
    Object value = opt(index);
    if (value instanceof JSONObject) {
        return (JSONObject) value;
    }
    return null;
}
/**
 * Returns the long at the given index, or zero when the slot is
 * missing or not convertible to a number.
 *
 * @param index The index must be between 0 and length() - 1.
 * @return the numeric value, defaulting to 0
 */
public long optLong(int index) {
    return optLong(index, 0);
}
/**
 * Returns the long at the given index, falling back to the supplied
 * default when the slot is missing or not convertible to a number.
 *
 * @param index The index must be between 0 and length() - 1.
 * @param defaultValue value used when conversion fails
 * @return the numeric value
 */
public long optLong(int index, long defaultValue) {
    try {
        return getLong(index);
    } catch (Exception ignored) {
        // Any lookup/conversion failure deliberately yields the default.
        return defaultValue;
    }
}
/**
 * Returns the string at the given index, or "" when the slot is
 * missing. Non-string, non-null values are converted via toString().
 *
 * @param index The index must be between 0 and length() - 1.
 * @return a String value, defaulting to the empty string
 */
public String optString(int index) {
    return optString(index, "");
}
/**
 * Returns the string at the given index, falling back to the supplied
 * default when the slot is missing.
 *
 * @param index The index must be between 0 and length() - 1.
 * @param defaultValue value used when the slot is absent
 * @return a String value
 */
public String optString(int index, String defaultValue) {
    Object value = opt(index);
    if (value == null) {
        return defaultValue;
    }
    return value.toString();
}
/**
 * Appends a boolean, growing the array by one.
 *
 * @param value a boolean value
 * @return this, for chaining
 */
public JSONArray put(boolean value) {
    Boolean boxed = value ? Boolean.TRUE : Boolean.FALSE;
    put(boxed);
    return this;
}
/**
 * Appends a double, growing the array by one.
 *
 * @param value a double value
 * @return this, for chaining
 * @throws JSONException if the value is not finite (NaN/Infinity)
 */
public JSONArray put(double value) throws JSONException {
    Double boxed = Double.valueOf(value);
    // Reject NaN/Infinity before storing, matching JSON's number rules.
    JSONObject.testValidity(boxed);
    put(boxed);
    return this;
}
/**
 * Appends an int, growing the array by one.
 *
 * @param value an int value
 * @return this, for chaining
 */
public JSONArray put(int value) {
    Integer boxed = Integer.valueOf(value);
    put(boxed);
    return this;
}
/**
 * Appends a long, growing the array by one.
 *
 * @param value a long value
 * @return this, for chaining
 */
public JSONArray put(long value) {
    Long boxed = Long.valueOf(value);
    put(boxed);
    return this;
}
/**
 * Appends an object, growing the array by one.
 *
 * @param value an object value — should be a Boolean, Double, Integer,
 *        JSONArray, JSONObject, Long, String, or JSONObject.NULL
 * @return this, for chaining
 */
public JSONArray put(Object value) {
    myArrayList.add(value);
    return this;
}
/**
 * Puts or replaces a boolean at the given index, padding with nulls
 * when the index is past the current end.
 *
 * @param index the subscript
 * @param value a boolean value
 * @return this, for chaining
 * @throws JSONException if the index is negative
 */
public JSONArray put(int index, boolean value) throws JSONException {
    Boolean boxed = value ? Boolean.TRUE : Boolean.FALSE;
    put(index, boxed);
    return this;
}
/**
 * Puts or replaces a double at the given index, padding with nulls
 * when the index is past the current end.
 *
 * @param index the subscript
 * @param value a double value
 * @return this, for chaining
 * @throws JSONException if the index is negative or the value is not
 *         finite
 */
public JSONArray put(int index, double value) throws JSONException {
    Double boxed = Double.valueOf(value);
    put(index, boxed);
    return this;
}
/**
 * Puts or replaces an int at the given index, padding with nulls when
 * the index is past the current end.
 *
 * @param index the subscript
 * @param value an int value
 * @return this, for chaining
 * @throws JSONException if the index is negative
 */
public JSONArray put(int index, int value) throws JSONException {
    Integer boxed = Integer.valueOf(value);
    put(index, boxed);
    return this;
}
/**
 * Puts or replaces a long at the given index, padding with nulls when
 * the index is past the current end.
 *
 * @param index the subscript
 * @param value a long value
 * @return this, for chaining
 * @throws JSONException if the index is negative
 */
public JSONArray put(int index, long value) throws JSONException {
    Long boxed = Long.valueOf(value);
    put(index, boxed);
    return this;
}
/**
 * Put or replace an object value in the JSONArray. If the index is greater
 * than the length of the JSONArray, then null elements will be added as
 * necessary to pad it out.
 *
 * @param index The subscript.
 * @param value The value to put into the array.
 * @return this
 * @throws JSONException If the index is negative or if the value is
 * an invalid number.
 */
public JSONArray put(int index, Object value) throws JSONException {
JSONObject.testValidity(value);
if (index < 0) {
throw new JSONException("JSONArray[" + index + "] not found.");
}
if (index < length()) {
// In-range index: overwrite the existing slot.
this.myArrayList.set(index, value);
} else {
// Past-the-end index: append one element per iteration until the
// target slot becomes the next append position; each put(null)
// grows the list, so the loop terminates.
// NOTE(review): padding inserts Java null rather than JSONObject.NULL;
// confirm consumers such as valueToString tolerate raw nulls.
while (index != length()) {
put(null);
}
put(value);
}
return this;
}
/**
 * Builds a JSONObject by pairing the names in the given array with this
 * array's values, position by position.
 *
 * @param names keys to pair with this array's values
 * @return the resulting JSONObject, or null when either array is empty
 *         or names is null
 * @throws JSONException if any of the names are null
 */
public JSONObject toJSONObject(JSONArray names) throws JSONException {
    boolean nothingToPair =
            names == null || names.length() == 0 || length() == 0;
    if (nothingToPair) {
        return null;
    }
    JSONObject result = new JSONObject();
    int count = names.length();
    for (int pos = 0; pos < count; pos++) {
        result.put(names.getString(pos), this.opt(pos));
    }
    return result;
}
/**
 * Renders this JSONArray as compact JSON text, with no unnecessary
 * whitespace. Returns null instead of throwing when a syntactically
 * correct rendering is impossible (e.g. the array holds an invalid
 * number).
 * <p/>
 * Warning: assumes the data structure is acyclical.
 *
 * @return JSON text, or null on failure
 */
@Override
public String toString() {
    try {
        return '[' + join(",") + ']';
    } catch (Exception ignored) {
        // Contract: rendering failures surface as null, not exceptions.
        return null;
    }
}
/**
 * Renders this JSONArray as pretty-printed JSON text starting at the
 * top indentation level.
 * Warning: assumes the data structure is acyclical.
 *
 * @param indentFactor spaces added per nesting level
 * @return JSON text beginning with <code>[</code> and ending with
 *         <code>]</code>
 * @throws JSONException if a contained value cannot be rendered
 */
public String toString(int indentFactor) throws JSONException {
    return toString(indentFactor, 0);
}
/**
 * Make a prettyprinted JSON text of this JSONArray.
 * Warning: This method assumes that the data structure is acyclical.
 *
 * @param indentFactor The number of spaces to add to each level of
 * indentation.
 * @param indent The indention of the top level.
 * @return a printable, displayable, transmittable
 * representation of the array.
 * @throws JSONException if a contained value cannot be rendered
 */
String toString(int indentFactor, int indent) throws JSONException {
int len = length();
if (len == 0) {
return "[]";
}
int i;
StringBuilder sb = new StringBuilder("[");
if (len == 1) {
// Single-element arrays stay on one line.
sb.append(JSONObject.valueToString(this.myArrayList.get(0),
indentFactor, indent));
} else {
// Multi-element arrays place each element on its own line, indented
// one level deeper than the surrounding context.
int newindent = indent + indentFactor;
sb.append('\n');
for (i = 0; i < len; i += 1) {
if (i > 0) {
sb.append(",\n");
}
for (int j = 0; j < newindent; j += 1) {
sb.append(' ');
}
sb.append(JSONObject.valueToString(this.myArrayList.get(i),
indentFactor, newindent));
}
sb.append('\n');
// Re-indent the closing bracket to the enclosing level.
for (i = 0; i < indent; i += 1) {
sb.append(' ');
}
}
sb.append(']');
return sb.toString();
}
/**
 * Two JSONArrays are equal exactly when their backing lists are equal.
 */
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }
    if (obj == null || getClass() != obj.getClass()) {
        return false;
    }
    JSONArray other = (JSONArray) obj;
    if (myArrayList == null) {
        return other.myArrayList == null;
    }
    return myArrayList.equals(other.myArrayList);
}
/** Hash derives from the backing list so it stays consistent with equals. */
@Override
public int hashCode() {
    if (myArrayList == null) {
        return super.hashCode();
    }
    return myArrayList.hashCode();
}
/**
 * Streams this JSONArray as compact JSON text to the given writer; no
 * whitespace is added.
 * <p/>
 * Warning: assumes the data structure is acyclical.
 *
 * @return the writer that was passed in
 * @throws JSONException wrapping any underlying IOException
 */
public Writer write(Writer writer) throws JSONException {
    try {
        writer.write('[');
        boolean needSeparator = false;
        int count = length();
        for (int pos = 0; pos < count; pos++) {
            if (needSeparator) {
                writer.write(',');
            }
            Object element = this.myArrayList.get(pos);
            // Nested containers stream themselves; everything else is
            // rendered through valueToString.
            if (element instanceof JSONObject) {
                ((JSONObject) element).write(writer);
            } else if (element instanceof JSONArray) {
                ((JSONArray) element).write(writer);
            } else {
                writer.write(JSONObject.valueToString(element));
            }
            needSeparator = true;
        }
        writer.write(']');
        return writer;
    } catch (IOException e) {
        throw new JSONException(e);
    }
}
/** Shrinks the backing list's capacity to its current size. */
public void trimToSize() {
    this.myArrayList.trimToSize();
}
/** Pre-allocates capacity in the backing list for at least i elements. */
public void ensureCapacity(int i) {
    this.myArrayList.ensureCapacity(i);
}
/** Element count; same value as {@link #length()}. */
public int size() {
    return this.myArrayList.size();
}
/** True when the array holds no elements. */
public boolean isEmpty() {
    return this.myArrayList.isEmpty();
}
/** Delegates membership testing to the backing list. */
public boolean contains(Object o) {
    return this.myArrayList.contains(o);
}
/** First index of the given object in the backing list, or -1. */
public int indexOf(Object o) {
    return this.myArrayList.indexOf(o);
}
/** Last index of the given object in the backing list, or -1. */
public int lastIndexOf(Object o) {
    return this.myArrayList.lastIndexOf(o);
}
@Override
public Object clone() {
// NOTE(review): this returns a clone of the backing ArrayList, NOT a
// JSONArray, so x.clone().getClass() != x.getClass() — this violates
// the Object.clone() contract. Verify callers expect a List here; a
// proper fix would construct a new JSONArray, but no suitable
// constructor is visible in this view.
return myArrayList.clone();
}
/** Snapshot of the backing list as a new Object array. */
public Object[] toArray() {
    return this.myArrayList.toArray();
}
/** Snapshot of the backing list into (or typed like) the given array. */
public Object[] toArray(Object[] objects) {
    return this.myArrayList.toArray(objects);
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.datanode;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URI;
import java.nio.channels.ClosedChannelException;
import java.nio.channels.FileChannel;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Random;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.commons.io.FileUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.DF;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.DFSTestUtil;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.server.datanode.checker.VolumeCheckResult;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.DataNodeVolumeMetrics;
import org.apache.hadoop.util.AutoCloseableLock;
import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import org.apache.hadoop.hdfs.protocol.LocatedBlock;
import org.apache.hadoop.hdfs.server.datanode.DirectoryScanner.ReportCompiler;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsDatasetSpi;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsDatasetSpi.FsVolumeReferences;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeReference;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetTestUtil;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsVolumeImpl;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.LazyPersistTestCase;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.Time;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
/**
* Tests {@link DirectoryScanner} handling of differences
* between blocks on the disk and block in memory.
*/
public class TestDirectoryScanner {
private static final Log LOG = LogFactory.getLog(TestDirectoryScanner.class);
private static final Configuration CONF = new HdfsConfiguration();
// Generation stamp used when tests fabricate metadata file names.
private static final int DEFAULT_GEN_STAMP = 9999;
private MiniDFSCluster cluster;
// Block pool id of the cluster under test, set by each test method.
private String bpid;
private DFSClient client;
private FsDatasetSpi<? extends FsVolumeSpi> fds = null;
private DirectoryScanner scanner = null;
private final Random rand = new Random();
private final Random r = new Random();
// Tiny block size so test files span many blocks cheaply.
private static final int BLOCK_LENGTH = 100;
static {
// Shared configuration: 100-byte blocks, per-byte checksums, 1s
// heartbeats, and unlimited locked memory for RAM_DISK scenarios.
CONF.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, BLOCK_LENGTH);
CONF.setInt(DFSConfigKeys.DFS_BYTES_PER_CHECKSUM_KEY, 1);
CONF.setLong(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, 1L);
CONF.setLong(DFSConfigKeys.DFS_DATANODE_MAX_LOCKED_MEMORY_KEY,
Long.MAX_VALUE);
}
@Before
public void setup() {
// Prepare cache manipulation for lazy-persist (RAM_DISK) code paths
// before each test. NOTE(review): presumably installs a test stub that
// avoids real memory locking — confirm in LazyPersistTestCase.
LazyPersistTestCase.initCacheManipulator();
}
/**
 * Creates "/&lt;fileNamePrefix&gt;.dat" with length {@code fileLen} and
 * returns its located blocks.
 */
private List<LocatedBlock> createFile(String fileNamePrefix,
        long fileLen,
        boolean isLazyPersist) throws IOException {
    FileSystem fs = cluster.getFileSystem();
    Path filePath = new Path("/" + fileNamePrefix + ".dat");
    // Single replica; seed comes from the shared RNG.
    DFSTestUtil.createFile(
            fs, filePath, isLazyPersist, 1024, fileLen,
            BLOCK_LENGTH, (short) 1, r.nextLong(), false);
    return client
            .getLocatedBlocks(filePath.toString(), 0, fileLen)
            .getLocatedBlocks();
}
/**
 * Truncates the first non-empty block file that has a matching
 * metadata file, returning its block id (0 when nothing qualified).
 */
private long truncateBlockFile() throws IOException {
    try (AutoCloseableLock lock = fds.acquireDatasetLock()) {
        for (ReplicaInfo replica : FsDatasetTestUtil.getReplicas(fds, bpid)) {
            File blockFile = new File(replica.getBlockURI());
            File metaFile = new File(replica.getMetadataURI());
            // Only truncate a block file whose metadata file exists, so the
            // scanner sees a length mismatch rather than a missing file.
            if (blockFile.exists() && blockFile.length() != 0 && metaFile.exists()) {
                FileOutputStream out = null;
                FileChannel channel = null;
                try {
                    out = new FileOutputStream(blockFile);
                    channel = out.getChannel();
                    channel.truncate(0);
                    LOG.info("Truncated block file " + blockFile.getAbsolutePath());
                    return replica.getBlockId();
                } finally {
                    IOUtils.cleanup(LOG, channel, out);
                }
            }
        }
    }
    return 0;
}
/**
 * Deletes the first block file that has a matching metadata file,
 * returning its block id (0 when nothing was deleted).
 */
private long deleteBlockFile() {
    try (AutoCloseableLock lock = fds.acquireDatasetLock()) {
        for (ReplicaInfo replica : FsDatasetTestUtil.getReplicas(fds, bpid)) {
            File blockFile = new File(replica.getBlockURI());
            File metaFile = new File(replica.getMetadataURI());
            // Require the metadata file so only the block file goes missing.
            if (blockFile.exists() && metaFile.exists() && blockFile.delete()) {
                LOG.info("Deleting block file " + blockFile.getAbsolutePath());
                return replica.getBlockId();
            }
        }
    }
    return 0;
}
/**
 * Deletes the first replica metadata file found, returning the block
 * id (0 when nothing was deleted).
 */
private long deleteMetaFile() {
    try (AutoCloseableLock lock = fds.acquireDatasetLock()) {
        for (ReplicaInfo replica : FsDatasetTestUtil.getReplicas(fds, bpid)) {
            if (replica.metadataExists() && replica.deleteMetadata()) {
                LOG.info("Deleting metadata " + replica.getMetadataURI());
                return replica.getBlockId();
            }
        }
    }
    return 0;
}
/**
 * Duplicate the given block on all volumes.
 * @param blockId id of the finalized replica to copy
 * @throws IOException if obtaining volume references or copying fails
 */
private void duplicateBlock(long blockId) throws IOException {
try(AutoCloseableLock lock = fds.acquireDatasetLock()) {
ReplicaInfo b = FsDatasetTestUtil.fetchReplicaInfo(fds, bpid, blockId);
try (FsDatasetSpi.FsVolumeReferences volumes =
fds.getFsVolumeReferences()) {
for (FsVolumeSpi v : volumes) {
// Skip the volume that already holds the replica.
if (v.getStorageID().equals(b.getVolume().getStorageID())) {
continue;
}
// Volume without a copy of the block. Make a copy now.
File sourceBlock = new File(b.getBlockURI());
File sourceMeta = new File(b.getMetadataURI());
URI sourceRoot = b.getVolume().getStorageLocation().getUri();
URI destRoot = v.getStorageLocation().getUri();
// Rebuild the replica's volume-relative path under the destination
// volume so the copy lands in the equivalent directory.
String relativeBlockPath =
sourceRoot.relativize(sourceBlock.toURI())
.getPath();
String relativeMetaPath =
sourceRoot.relativize(sourceMeta.toURI())
.getPath();
File destBlock = new File(new File(destRoot).toString(),
relativeBlockPath);
File destMeta = new File(new File(destRoot).toString(),
relativeMetaPath);
destBlock.getParentFile().mkdirs();
FileUtils.copyFile(sourceBlock, destBlock);
FileUtils.copyFile(sourceMeta, destMeta);
if (destBlock.exists() && destMeta.exists()) {
LOG.info("Copied " + sourceBlock + " ==> " + destBlock);
LOG.info("Copied " + sourceMeta + " ==> " + destMeta);
}
}
}
}
}
/**
 * Returns a random block id that is not already present in the
 * dataset's replica map for the current block pool.
 *
 * @return an unused block id
 */
private long getFreeBlockId() {
    // Fix: the original seeded id before the loop and immediately
    // overwrote it (dead store). A do/while generates and checks each
    // candidate exactly once.
    long id;
    do {
        id = rand.nextLong();
    } while (FsDatasetTestUtil.fetchReplicaInfo(fds, bpid, id) != null);
    return id;
}
/** File name of the block file for the given block id. */
private String getBlockFile(long id) {
    return Block.BLOCK_FILE_PREFIX + id;
}
/** File name of the metadata file for the given block id, using the
 * test's fixed generation stamp. */
private String getMetaFile(long id) {
    return Block.BLOCK_FILE_PREFIX + id + "_" + DEFAULT_GEN_STAMP
            + Block.METADATA_EXTENSION;
}
/**
 * Creates an empty block file (with no metadata file) in a randomly
 * chosen volume and returns its block id.
 *
 * @return id of the newly fabricated block file
 * @throws IOException if volume references or file creation fail
 */
private long createBlockFile() throws IOException {
    long id = getFreeBlockId();
    try (FsDatasetSpi.FsVolumeReferences volumes = fds.getFsVolumeReferences()) {
        int numVolumes = volumes.size();
        // Fix: nextInt(numVolumes - 1) could never select the last volume
        // and threw IllegalArgumentException on a single-volume datanode;
        // nextInt(numVolumes) covers every volume uniformly.
        int index = rand.nextInt(numVolumes);
        File finalizedDir = ((FsVolumeImpl) volumes.get(index))
                .getFinalizedDir(bpid);
        File file = new File(finalizedDir, getBlockFile(id));
        if (file.createNewFile()) {
            LOG.info("Created block file " + file.getName());
        }
    }
    return id;
}
/**
 * Creates an empty metadata file (with no block file) in a randomly
 * chosen volume and returns the corresponding block id.
 *
 * @return id associated with the fabricated metadata file
 * @throws IOException if volume references or file creation fail
 */
private long createMetaFile() throws IOException {
    long id = getFreeBlockId();
    try (FsDatasetSpi.FsVolumeReferences refs = fds.getFsVolumeReferences()) {
        int numVolumes = refs.size();
        // Fix: nextInt(numVolumes - 1) never selected the last volume and
        // failed outright on single-volume datanodes.
        int index = rand.nextInt(numVolumes);
        File finalizedDir = ((FsVolumeImpl) refs.get(index))
                .getFinalizedDir(bpid);
        File file = new File(finalizedDir, getMetaFile(id));
        if (file.createNewFile()) {
            LOG.info("Created metafile " + file.getName());
        }
    }
    return id;
}
/**
 * Creates a block file plus its metadata file in a random volume, and
 * also drops two extraneous files (".l" / ".n") that sort around the
 * meta file to exercise DirectoryScanner's handling of stray entries.
 *
 * @return id of the newly fabricated block
 * @throws IOException if volume references or file creation fail
 */
private long createBlockMetaFile() throws IOException {
    long id = getFreeBlockId();
    try (FsDatasetSpi.FsVolumeReferences refs = fds.getFsVolumeReferences()) {
        int numVolumes = refs.size();
        // Fix: nextInt(numVolumes - 1) never selected the last volume and
        // failed outright on single-volume datanodes.
        int index = rand.nextInt(numVolumes);
        File finalizedDir =
                ((FsVolumeImpl) refs.get(index)).getFinalizedDir(bpid);
        File file = new File(finalizedDir, getBlockFile(id));
        if (file.createNewFile()) {
            LOG.info("Created block file " + file.getName());
            // Create files with same prefix as block file but extension names
            // such that during sorting, these files appear around meta file
            // to test how DirectoryScanner handles extraneous files
            String name1 = file.getAbsolutePath() + ".l";
            String name2 = file.getAbsolutePath() + ".n";
            file = new File(name1);
            if (file.createNewFile()) {
                LOG.info("Created extraneous file " + name1);
            }
            file = new File(name2);
            if (file.createNewFile()) {
                LOG.info("Created extraneous file " + name2);
            }
            file = new File(finalizedDir, getMetaFile(id));
            if (file.createNewFile()) {
                LOG.info("Created metafile " + file.getName());
            }
        }
    }
    return id;
}
/** Convenience overload of scan() expecting zero duplicate blocks. */
private void scan(long totalBlocks, int diffsize, long missingMetaFile, long missingBlockFile,
        long missingMemoryBlocks, long mismatchBlocks) throws IOException {
    scan(totalBlocks, diffsize, missingMetaFile, missingBlockFile,
            missingMemoryBlocks, mismatchBlocks, 0);
}
/**
 * Runs DirectoryScanner.reconcile() and asserts that the retained diff
 * size and every per-block-pool statistic match the expected values.
 */
private void scan(long totalBlocks, int diffsize, long missingMetaFile, long missingBlockFile,
        long missingMemoryBlocks, long mismatchBlocks, long duplicateBlocks) throws IOException {
    scanner.reconcile();
    assertTrue(scanner.diffs.containsKey(bpid));
    LinkedList<FsVolumeSpi.ScanInfo> poolDiff = scanner.diffs.get(bpid);
    assertTrue(scanner.stats.containsKey(bpid));
    DirectoryScanner.Stats poolStats = scanner.stats.get(bpid);
    assertEquals(diffsize, poolDiff.size());
    assertEquals(totalBlocks, poolStats.totalBlocks);
    assertEquals(missingMetaFile, poolStats.missingMetaFile);
    assertEquals(missingBlockFile, poolStats.missingBlockFile);
    assertEquals(missingMemoryBlocks, poolStats.missingMemoryBlocks);
    assertEquals(mismatchBlocks, poolStats.mismatchBlocks);
    assertEquals(duplicateBlocks, poolStats.duplicateBlocks);
}
/**
 * After a block is duplicated onto RAM_DISK, the scanner should keep
 * the copy on persistent (DEFAULT) storage and resolve the duplicate.
 */
@Test (timeout=300000)
public void testRetainBlockOnPersistentStorage() throws Exception {
cluster = new MiniDFSCluster
.Builder(CONF)
.storageTypes(new StorageType[] { StorageType.RAM_DISK, StorageType.DEFAULT })
.numDataNodes(1)
.build();
try {
cluster.waitActive();
DataNode dataNode = cluster.getDataNodes().get(0);
bpid = cluster.getNamesystem().getBlockPoolId();
fds = DataNodeTestUtils.getFSDataset(cluster.getDataNodes().get(0));
client = cluster.getFileSystem().getClient();
scanner = new DirectoryScanner(dataNode, fds, CONF);
scanner.setRetainDiffs(true);
// Stop the lazy writer so it cannot race with the scanner.
FsDatasetTestUtil.stopLazyWriter(cluster.getDataNodes().get(0));
// Add a file with 1 block
List<LocatedBlock> blocks =
createFile(GenericTestUtils.getMethodName(), BLOCK_LENGTH, false);
// Ensure no difference between volumeMap and disk.
scan(1, 0, 0, 0, 0, 0);
// Make a copy of the block on RAM_DISK and ensure that it is
// picked up by the scanner.
duplicateBlock(blocks.get(0).getBlock().getBlockId());
scan(2, 1, 0, 0, 0, 0, 1);
verifyStorageType(blocks.get(0).getBlock().getBlockId(), false);
// A follow-up scan must show the duplicate resolved.
scan(1, 0, 0, 0, 0, 0);
} finally {
if (scanner != null) {
scanner.shutdown();
scanner = null;
}
cluster.shutdown();
cluster = null;
}
}
/**
 * A lazy-persist block duplicated onto DEFAULT storage should have its
 * RAM_DISK (transient) copy deleted by the scanner.
 */
@Test (timeout=300000)
public void testDeleteBlockOnTransientStorage() throws Exception {
cluster = new MiniDFSCluster
.Builder(CONF)
.storageTypes(new StorageType[] { StorageType.RAM_DISK, StorageType.DEFAULT })
.numDataNodes(1)
.build();
try {
cluster.waitActive();
bpid = cluster.getNamesystem().getBlockPoolId();
DataNode dataNode = cluster.getDataNodes().get(0);
fds = DataNodeTestUtils.getFSDataset(cluster.getDataNodes().get(0));
client = cluster.getFileSystem().getClient();
scanner = new DirectoryScanner(dataNode, fds, CONF);
scanner.setRetainDiffs(true);
// Stop the lazy writer so it cannot race with the scanner.
FsDatasetTestUtil.stopLazyWriter(cluster.getDataNodes().get(0));
// Create a file file on RAM_DISK
List<LocatedBlock> blocks =
createFile(GenericTestUtils.getMethodName(), BLOCK_LENGTH, true);
// Ensure no difference between volumeMap and disk.
scan(1, 0, 0, 0, 0, 0);
// Make a copy of the block on DEFAULT storage and ensure that it is
// picked up by the scanner.
duplicateBlock(blocks.get(0).getBlock().getBlockId());
scan(2, 1, 0, 0, 0, 0, 1);
// Ensure that the copy on RAM_DISK was deleted.
verifyStorageType(blocks.get(0).getBlock().getBlockId(), false);
scan(1, 0, 0, 0, 0, 0);
} finally {
if (scanner != null) {
scanner.shutdown();
scanner = null;
}
cluster.shutdown();
cluster = null;
}
}
/**
 * Runs the full reconciliation scenario suite with both serial and
 * parallel report compilation.
 */
@Test (timeout=600000)
public void testDirectoryScanner() throws Exception {
    for (int parallelism = 1; parallelism <= 2; parallelism++) {
        runTest(parallelism);
    }
}
/**
 * Exercises DirectoryScanner reconciliation across many disk/memory
 * divergence scenarios with the given number of scan threads.
 * The scan(...) assertion arguments are, in order: totalBlocks,
 * diffSize, missingMetaFile, missingBlockFile, missingMemoryBlocks,
 * mismatchBlocks.
 *
 * @param parallelism value for dfs.datanode.directoryscan.threads
 * @throws Exception on any unexpected failure
 */
public void runTest(int parallelism) throws Exception {
cluster = new MiniDFSCluster.Builder(CONF).build();
try {
cluster.waitActive();
bpid = cluster.getNamesystem().getBlockPoolId();
fds = DataNodeTestUtils.getFSDataset(cluster.getDataNodes().get(0));
client = cluster.getFileSystem().getClient();
CONF.setInt(DFSConfigKeys.DFS_DATANODE_DIRECTORYSCAN_THREADS_KEY,
parallelism);
DataNode dataNode = cluster.getDataNodes().get(0);
scanner = new DirectoryScanner(dataNode, fds, CONF);
scanner.setRetainDiffs(true);
// Add files with 100 blocks
createFile(GenericTestUtils.getMethodName(), BLOCK_LENGTH * 100, false);
long totalBlocks = 100;
// Test1: No difference between volumeMap and disk
scan(100, 0, 0, 0, 0, 0);
// Test2: block metafile is missing
long blockId = deleteMetaFile();
scan(totalBlocks, 1, 1, 0, 0, 1);
// Reconcile resets the replica to the grandfather generation stamp.
verifyGenStamp(blockId, HdfsConstants.GRANDFATHER_GENERATION_STAMP);
scan(totalBlocks, 0, 0, 0, 0, 0);
// Test3: block file is missing
blockId = deleteBlockFile();
scan(totalBlocks, 1, 0, 1, 0, 0);
totalBlocks--;
verifyDeletion(blockId);
scan(totalBlocks, 0, 0, 0, 0, 0);
// Test4: A block file exists for which there is no metafile and
// a block in memory
blockId = createBlockFile();
totalBlocks++;
scan(totalBlocks, 1, 1, 0, 1, 0);
verifyAddition(blockId, HdfsConstants.GRANDFATHER_GENERATION_STAMP, 0);
scan(totalBlocks, 0, 0, 0, 0, 0);
// Test5: A metafile exists for which there is no block file and
// a block in memory
blockId = createMetaFile();
scan(totalBlocks+1, 1, 0, 1, 1, 0);
// The orphan metafile must be cleaned up by reconcile.
File metafile = new File(getMetaFile(blockId));
assertTrue(!metafile.exists());
scan(totalBlocks, 0, 0, 0, 0, 0);
// Test6: A block file and metafile exists for which there is no block in
// memory
blockId = createBlockMetaFile();
totalBlocks++;
scan(totalBlocks, 1, 0, 0, 1, 0);
verifyAddition(blockId, DEFAULT_GEN_STAMP, 0);
scan(totalBlocks, 0, 0, 0, 0, 0);
// Test7: Delete bunch of metafiles
for (int i = 0; i < 10; i++) {
blockId = deleteMetaFile();
}
scan(totalBlocks, 10, 10, 0, 0, 10);
scan(totalBlocks, 0, 0, 0, 0, 0);
// Test8: Delete bunch of block files
for (int i = 0; i < 10; i++) {
blockId = deleteBlockFile();
}
scan(totalBlocks, 10, 0, 10, 0, 0);
totalBlocks -= 10;
scan(totalBlocks, 0, 0, 0, 0, 0);
// Test9: create a bunch of blocks files
for (int i = 0; i < 10 ; i++) {
blockId = createBlockFile();
}
totalBlocks += 10;
scan(totalBlocks, 10, 10, 0, 10, 0);
scan(totalBlocks, 0, 0, 0, 0, 0);
// Test10: create a bunch of metafiles
for (int i = 0; i < 10 ; i++) {
blockId = createMetaFile();
}
scan(totalBlocks+10, 10, 0, 10, 10, 0);
scan(totalBlocks, 0, 0, 0, 0, 0);
// Test11: create a bunch block files and meta files
for (int i = 0; i < 10 ; i++) {
blockId = createBlockMetaFile();
}
totalBlocks += 10;
scan(totalBlocks, 10, 0, 0, 10, 0);
scan(totalBlocks, 0, 0, 0, 0, 0);
// Test12: truncate block files to test block length mismatch
for (int i = 0; i < 10 ; i++) {
truncateBlockFile();
}
scan(totalBlocks, 10, 0, 0, 0, 10);
scan(totalBlocks, 0, 0, 0, 0, 0);
// Test13: all the conditions combined
createMetaFile();
createBlockFile();
createBlockMetaFile();
deleteMetaFile();
deleteBlockFile();
truncateBlockFile();
scan(totalBlocks+3, 6, 2, 2, 3, 2);
scan(totalBlocks+1, 0, 0, 0, 0, 0);
// Test14: make sure no throttling is happening
assertTrue("Throttle appears to be engaged",
scanner.timeWaitingMs.get() < 10L);
assertTrue("Report complier threads logged no execution time",
scanner.timeRunningMs.get() > 0L);
// Test15: validate clean shutdown of DirectoryScanner
////assertTrue(scanner.getRunStatus()); //assumes "real" FSDataset, not sim
scanner.shutdown();
assertFalse(scanner.getRunStatus());
} finally {
if (scanner != null) {
scanner.shutdown();
scanner = null;
}
cluster.shutdown();
}
}
/**
* Test that the timeslice throttle limits the report compiler thread's
* execution time correctly. We test by scanning a large block pool and
* comparing the time spent waiting to the time spent running.
*
* The block pool has to be large, or the ratio will be off. The throttle
* allows the report compiler thread to finish its current cycle when
* blocking it, so the ratio will always be a little lower than expected.
* The smaller the block pool, the further off the ratio will be.
*
* @throws Exception thrown on unexpected failure
*/
  @Test (timeout=600000)
  public void testThrottling() throws Exception {
    Configuration conf = new Configuration(CONF);
    // We need lots of blocks so the report compiler threads have enough to
    // keep them busy while we watch them.
    int blocks = 20000;
    int maxRetries = 3;
    cluster = new MiniDFSCluster.Builder(conf).build();
    try {
      cluster.waitActive();
      bpid = cluster.getNamesystem().getBlockPoolId();
      fds = DataNodeTestUtils.getFSDataset(cluster.getDataNodes().get(0));
      client = cluster.getFileSystem().getClient();
      // Setting the limit after the cluster is built is fine here: each
      // DirectoryScanner constructed below reads it from this conf instance.
      conf.setInt(
          DFSConfigKeys.DFS_DATANODE_DIRECTORYSCAN_THROTTLE_LIMIT_MS_PER_SEC_KEY,
          100);
      DataNode dataNode = cluster.getDataNodes().get(0);
      final int maxBlocksPerFile = (int) DFSConfigKeys
          .DFS_NAMENODE_MAX_BLOCKS_PER_FILE_DEFAULT;
      int numBlocksToCreate = blocks;
      // Spread the blocks over as many files as needed so no single file
      // exceeds the per-file block limit.
      while (numBlocksToCreate > 0) {
        final int toCreate = Math.min(maxBlocksPerFile, numBlocksToCreate);
        createFile(GenericTestUtils.getMethodName() + numBlocksToCreate,
            BLOCK_LENGTH * toCreate, false);
        numBlocksToCreate -= toCreate;
      }
      // Timing-based ratios are noisy, so retry up to maxRetries times until
      // the measured waiting/running ratio lands in the expected window.
      float ratio = 0.0f;
      int retries = maxRetries;
      while ((retries > 0) && ((ratio < 7f) || (ratio > 10f))) {
        scanner = new DirectoryScanner(dataNode, fds, conf);
        ratio = runThrottleTest(blocks);
        retries -= 1;
      }
      // Waiting should be about 9x running.
      LOG.info("RATIO: " + ratio);
      assertTrue("Throttle is too restrictive", ratio <= 10f);
      assertTrue("Throttle is too permissive", ratio >= 7f);
      // Test with a different limit
      conf.setInt(
          DFSConfigKeys.DFS_DATANODE_DIRECTORYSCAN_THROTTLE_LIMIT_MS_PER_SEC_KEY,
          200);
      ratio = 0.0f;
      retries = maxRetries;
      while ((retries > 0) && ((ratio < 2.75f) || (ratio > 4.5f))) {
        scanner = new DirectoryScanner(dataNode, fds, conf);
        ratio = runThrottleTest(blocks);
        retries -= 1;
      }
      // Waiting should be about 4x running.
      LOG.info("RATIO: " + ratio);
      assertTrue("Throttle is too restrictive", ratio <= 4.5f);
      assertTrue("Throttle is too permissive", ratio >= 2.75f);
      // Test with more than 1 thread
      conf.setInt(DFSConfigKeys.DFS_DATANODE_DIRECTORYSCAN_THREADS_KEY, 3);
      conf.setInt(
          DFSConfigKeys.DFS_DATANODE_DIRECTORYSCAN_THROTTLE_LIMIT_MS_PER_SEC_KEY,
          100);
      ratio = 0.0f;
      retries = maxRetries;
      while ((retries > 0) && ((ratio < 7f) || (ratio > 10f))) {
        scanner = new DirectoryScanner(dataNode, fds, conf);
        ratio = runThrottleTest(blocks);
        retries -= 1;
      }
      // Waiting should be about 9x running.
      LOG.info("RATIO: " + ratio);
      assertTrue("Throttle is too restrictive", ratio <= 10f);
      assertTrue("Throttle is too permissive", ratio >= 7f);
      // Test with no limit.  Note CONF (not conf) is used, i.e. the original
      // configuration with the default (disabled) throttle.
      scanner = new DirectoryScanner(dataNode, fds, CONF);
      scanner.setRetainDiffs(true);
      scan(blocks, 0, 0, 0, 0, 0);
      scanner.shutdown();
      assertFalse(scanner.getRunStatus());
      assertTrue("Throttle appears to be engaged",
          scanner.timeWaitingMs.get() < 10L);
      assertTrue("Report complier threads logged no execution time",
          scanner.timeRunningMs.get() > 0L);
      // Test with a 1ms limit.  This also tests whether the scanner can be
      // shutdown cleanly in mid stride.
      conf.setInt(
          DFSConfigKeys.DFS_DATANODE_DIRECTORYSCAN_THROTTLE_LIMIT_MS_PER_SEC_KEY,
          1);
      ratio = 0.0f;
      retries = maxRetries;
      ScheduledExecutorService interruptor =
          Executors.newScheduledThreadPool(maxRetries);
      try {
        while ((retries > 0) && (ratio < 10)) {
          scanner = new DirectoryScanner(dataNode, fds, conf);
          scanner.setRetainDiffs(true);
          final AtomicLong nowMs = new AtomicLong();
          // Stop the scanner after 2 seconds because otherwise it will take an
          // eternity to complete it's run
          interruptor.schedule(new Runnable() {
            @Override
            public void run() {
              nowMs.set(Time.monotonicNow());
              scanner.shutdown();
            }
          }, 2L, TimeUnit.SECONDS);
          scanner.reconcile();
          assertFalse(scanner.getRunStatus());
          long finalMs = nowMs.get();
          // If the scan didn't complete before the shutdown was run, check
          // that the shutdown was timely.  nowMs stays 0 when the scheduled
          // task never fired (i.e. the scan finished within 2 seconds).
          if (finalMs > 0) {
            LOG.info("Scanner took " + (Time.monotonicNow() - finalMs)
                + "ms to shutdown");
            assertTrue("Scanner took too long to shutdown",
                Time.monotonicNow() - finalMs < 1000L);
          }
          ratio =
              (float)scanner.timeWaitingMs.get() / scanner.timeRunningMs.get();
          retries -= 1;
        }
      } finally {
        interruptor.shutdown();
      }
      // We just want to test that it waits a lot, but it also runs some
      LOG.info("RATIO: " + ratio);
      assertTrue("Throttle is too permissive",
          ratio > 10);
      assertTrue("Report complier threads logged no execution time",
          scanner.timeRunningMs.get() > 0L);
      // Test with a 0 limit, i.e. disabled
      conf.setInt(
          DFSConfigKeys.DFS_DATANODE_DIRECTORYSCAN_THROTTLE_LIMIT_MS_PER_SEC_KEY,
          0);
      scanner = new DirectoryScanner(dataNode, fds, conf);
      scanner.setRetainDiffs(true);
      scan(blocks, 0, 0, 0, 0, 0);
      scanner.shutdown();
      assertFalse(scanner.getRunStatus());
      assertTrue("Throttle appears to be engaged",
          scanner.timeWaitingMs.get() < 10L);
      assertTrue("Report complier threads logged no execution time",
          scanner.timeRunningMs.get() > 0L);
      // Test with a 1000 limit, i.e. disabled
      conf.setInt(
          DFSConfigKeys.DFS_DATANODE_DIRECTORYSCAN_THROTTLE_LIMIT_MS_PER_SEC_KEY,
          1000);
      scanner = new DirectoryScanner(dataNode, fds, conf);
      scanner.setRetainDiffs(true);
      scan(blocks, 0, 0, 0, 0, 0);
      scanner.shutdown();
      assertFalse(scanner.getRunStatus());
      assertTrue("Throttle appears to be engaged",
          scanner.timeWaitingMs.get() < 10L);
      assertTrue("Report complier threads logged no execution time",
          scanner.timeRunningMs.get() > 0L);
      // Test that throttle works from regular start
      conf.setInt(DFSConfigKeys.DFS_DATANODE_DIRECTORYSCAN_THREADS_KEY, 1);
      conf.setInt(
          DFSConfigKeys.DFS_DATANODE_DIRECTORYSCAN_THROTTLE_LIMIT_MS_PER_SEC_KEY,
          10);
      conf.setInt(DFSConfigKeys.DFS_DATANODE_DIRECTORYSCAN_INTERVAL_KEY,
          1);
      scanner = new DirectoryScanner(dataNode, fds, conf);
      scanner.setRetainDiffs(true);
      scanner.start();
      // Poll for up to 5 seconds (50 x 100ms) until the scanner reports it
      // has spent at least 500ms waiting on the throttle.
      int count = 50;
      while ((count > 0) && (scanner.timeWaitingMs.get() < 500L)) {
        Thread.sleep(100L);
        count -= 1;
      }
      scanner.shutdown();
      assertFalse(scanner.getRunStatus());
      // count == 0 means we polled the full 5 seconds without ever seeing
      // throttle wait time accumulate, i.e. the throttle never engaged.
      assertTrue("Throttle does not appear to be engaged", count > 0);
    } finally {
      cluster.shutdown();
    }
  }
private float runThrottleTest(int blocks) throws IOException {
scanner.setRetainDiffs(true);
scan(blocks, 0, 0, 0, 0, 0);
scanner.shutdown();
assertFalse(scanner.getRunStatus());
return (float)scanner.timeWaitingMs.get() / scanner.timeRunningMs.get();
}
private void verifyAddition(long blockId, long genStamp, long size) {
final ReplicaInfo replicainfo;
replicainfo = FsDatasetTestUtil.fetchReplicaInfo(fds, bpid, blockId);
assertNotNull(replicainfo);
// Added block has the same file as the one created by the test
File file = new File(getBlockFile(blockId));
assertEquals(file.getName(),
FsDatasetTestUtil.getFile(fds, bpid, blockId).getName());
// Generation stamp is same as that of created file
assertEquals(genStamp, replicainfo.getGenerationStamp());
// File size matches
assertEquals(size, replicainfo.getNumBytes());
}
private void verifyDeletion(long blockId) {
// Ensure block does not exist in memory
assertNull(FsDatasetTestUtil.fetchReplicaInfo(fds, bpid, blockId));
}
private void verifyGenStamp(long blockId, long genStamp) {
final ReplicaInfo memBlock;
memBlock = FsDatasetTestUtil.fetchReplicaInfo(fds, bpid, blockId);
assertNotNull(memBlock);
assertEquals(genStamp, memBlock.getGenerationStamp());
}
private void verifyStorageType(long blockId, boolean expectTransient) {
final ReplicaInfo memBlock;
memBlock = FsDatasetTestUtil.fetchReplicaInfo(fds, bpid, blockId);
assertNotNull(memBlock);
assertThat(memBlock.getVolume().isTransientStorage(), is(expectTransient));
}
  /**
   * Minimal stub volume used by the ScanInfo tests below.  Every method is a
   * no-op or returns a fixed value; only {@link #getFinalizedDir(String)} and
   * {@link #getBaseURI()} produce meaningful (synthetic) paths.
   */
  private static class TestFsVolumeSpi implements FsVolumeSpi {
    @Override
    public String[] getBlockPoolList() {
      return new String[0];
    }
    @Override
    public FsVolumeReference obtainReference() throws ClosedChannelException {
      return null;
    }
    @Override
    public long getAvailable() throws IOException {
      return 0;
    }
    // Synthetic finalized directory rooted at /base; the tests only compare
    // the resulting path strings, no file is ever created there.
    public File getFinalizedDir(String bpid) throws IOException {
      return new File("/base/current/" + bpid + "/finalized");
    }
    @Override
    public StorageType getStorageType() {
      return StorageType.DEFAULT;
    }
    @Override
    public String getStorageID() {
      return "";
    }
    @Override
    public boolean isTransientStorage() {
      return false;
    }
    @Override
    public void reserveSpaceForReplica(long bytesToReserve) {
    }
    @Override
    public void releaseReservedSpace(long bytesToRelease) {
    }
    @Override
    public void releaseLockedMemory(long bytesToRelease) {
    }
    @Override
    public BlockIterator newBlockIterator(String bpid, String name) {
      throw new UnsupportedOperationException();
    }
    @Override
    public BlockIterator loadBlockIterator(String bpid, String name)
        throws IOException {
      throw new UnsupportedOperationException();
    }
    @Override
    public FsDatasetSpi getDataset() {
      throw new UnsupportedOperationException();
    }
    @Override
    public StorageLocation getStorageLocation() {
      return null;
    }
    @Override
    public URI getBaseURI() {
      return (new File("/base")).toURI();
    }
    @Override
    public DF getUsageStats(Configuration conf) {
      return null;
    }
    @Override
    public byte[] loadLastPartialChunkChecksum(
        File blockFile, File metaFile) throws IOException {
      return null;
    }
    @Override
    public LinkedList<ScanInfo> compileReport(String bpid,
        LinkedList<ScanInfo> report, ReportCompiler reportCompiler)
        throws InterruptedException, IOException {
      return null;
    }
    @Override
    public FileIoProvider getFileIoProvider() {
      return null;
    }
    @Override
    public DataNodeVolumeMetrics getMetrics() {
      return null;
    }
    @Override
    public VolumeCheckResult check(VolumeCheckContext context)
        throws Exception {
      return VolumeCheckResult.HEALTHY;
    }
  }
  /** Shared stub volume for the ScanInfo construction tests. */
  private final static TestFsVolumeSpi TEST_VOLUME = new TestFsVolumeSpi();
  /** Sample block pool IDs used only to build synthetic finalized-dir paths. */
  private final static String BPID_1 = "BP-783049782-127.0.0.1-1370971773491";
  private final static String BPID_2 = "BP-367845636-127.0.0.1-5895645674231";
void testScanInfoObject(long blockId, File blockFile, File metaFile)
throws Exception {
FsVolumeSpi.ScanInfo scanInfo =
new FsVolumeSpi.ScanInfo(blockId, blockFile, metaFile, TEST_VOLUME);
assertEquals(blockId, scanInfo.getBlockId());
if (blockFile != null) {
assertEquals(blockFile.getAbsolutePath(),
scanInfo.getBlockFile().getAbsolutePath());
} else {
assertNull(scanInfo.getBlockFile());
}
if (metaFile != null) {
assertEquals(metaFile.getAbsolutePath(),
scanInfo.getMetaFile().getAbsolutePath());
} else {
assertNull(scanInfo.getMetaFile());
}
assertEquals(TEST_VOLUME, scanInfo.getVolume());
}
void testScanInfoObject(long blockId) throws Exception {
FsVolumeSpi.ScanInfo scanInfo =
new FsVolumeSpi.ScanInfo(blockId, null, null, null);
assertEquals(blockId, scanInfo.getBlockId());
assertNull(scanInfo.getBlockFile());
assertNull(scanInfo.getMetaFile());
}
@Test(timeout=120000)
public void TestScanInfo() throws Exception {
testScanInfoObject(123,
new File(TEST_VOLUME.getFinalizedDir(BPID_1).getAbsolutePath(),
"blk_123"),
new File(TEST_VOLUME.getFinalizedDir(BPID_1).getAbsolutePath(),
"blk_123__1001.meta"));
testScanInfoObject(464,
new File(TEST_VOLUME.getFinalizedDir(BPID_1).getAbsolutePath(),
"blk_123"),
null);
testScanInfoObject(523,
null,
new File(TEST_VOLUME.getFinalizedDir(BPID_1).getAbsolutePath(),
"blk_123__1009.meta"));
testScanInfoObject(789,
null,
null);
testScanInfoObject(456);
testScanInfoObject(123,
new File(TEST_VOLUME.getFinalizedDir(BPID_2).getAbsolutePath(),
"blk_567"),
new File(TEST_VOLUME.getFinalizedDir(BPID_2).getAbsolutePath(),
"blk_567__1004.meta"));
}
/**
* Test the behavior of exception handling during directory scan operation.
* Directory scanner shouldn't abort the scan on every directory just because
* one had an error.
*/
  @Test(timeout = 60000)
  public void testExceptionHandlingWhileDirectoryScan() throws Exception {
    cluster = new MiniDFSCluster.Builder(CONF).build();
    try {
      cluster.waitActive();
      bpid = cluster.getNamesystem().getBlockPoolId();
      fds = DataNodeTestUtils.getFSDataset(cluster.getDataNodes().get(0));
      client = cluster.getFileSystem().getClient();
      // NOTE(review): this mutates the shared static CONF; later tests see a
      // single-threaded scanner as well.
      CONF.setInt(DFSConfigKeys.DFS_DATANODE_DIRECTORYSCAN_THREADS_KEY, 1);
      DataNode dataNode = cluster.getDataNodes().get(0);
      // Add files with 2 blocks
      createFile(GenericTestUtils.getMethodName(), BLOCK_LENGTH * 2, false);
      // Inject error on #getFinalizedDir() so that ReportCompiler#call() will
      // hit exception while preparing the block info report list.
      List<FsVolumeSpi> volumes = new ArrayList<>();
      Iterator<FsVolumeSpi> iterator = fds.getFsVolumeReferences().iterator();
      while (iterator.hasNext()) {
        FsVolumeImpl volume = (FsVolumeImpl) iterator.next();
        FsVolumeImpl spy = Mockito.spy(volume);
        // Only the first block pool of each volume is poisoned.
        Mockito.doThrow(new IOException("Error while getFinalizedDir"))
            .when(spy).getFinalizedDir(volume.getBlockPoolList()[0]);
        volumes.add(spy);
      }
      // Hand the scanner a dataset whose volume references resolve to the
      // spies, so the injected failures are actually hit during the scan.
      FsVolumeReferences volReferences = new FsVolumeReferences(volumes);
      FsDatasetSpi<? extends FsVolumeSpi> spyFds = Mockito.spy(fds);
      Mockito.doReturn(volReferences).when(spyFds).getFsVolumeReferences();
      scanner = new DirectoryScanner(dataNode, spyFds, CONF);
      scanner.setRetainDiffs(true);
      // The scan must complete without aborting despite the per-volume errors.
      scanner.reconcile();
    } finally {
      if (scanner != null) {
        scanner.shutdown();
        scanner = null;
      }
      cluster.shutdown();
    }
  }
}
| |
/*
* Copyright (c) 1996, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package java.beans;
import java.io.Serializable;
import java.io.ObjectStreamField;
import java.io.ObjectOutputStream;
import java.io.ObjectInputStream;
import java.io.IOException;
import java.util.Hashtable;
import java.util.Map.Entry;
/**
* This is a utility class that can be used by beans that support bound
* properties. It manages a list of listeners and dispatches
* {@link PropertyChangeEvent}s to them. You can use an instance of this class
* as a member field of your bean and delegate these types of work to it.
* The {@link PropertyChangeListener} can be registered for all properties
* or for a property specified by name.
* <p>
* Here is an example of {@code PropertyChangeSupport} usage that follows
* the rules and recommendations laid out in the JavaBeans™ specification:
* <pre>
* public class MyBean {
* private final PropertyChangeSupport pcs = new PropertyChangeSupport(this);
*
* public void addPropertyChangeListener(PropertyChangeListener listener) {
* this.pcs.addPropertyChangeListener(listener);
* }
*
* public void removePropertyChangeListener(PropertyChangeListener listener) {
* this.pcs.removePropertyChangeListener(listener);
* }
*
* private String value;
*
* public String getValue() {
* return this.value;
* }
*
* public void setValue(String newValue) {
* String oldValue = this.value;
* this.value = newValue;
* this.pcs.firePropertyChange("value", oldValue, newValue);
* }
*
* [...]
* }
* </pre>
* <p>
* A {@code PropertyChangeSupport} instance is thread-safe.
* <p>
* This class is serializable. When it is serialized it will save
* (and restore) any listeners that are themselves serializable. Any
* non-serializable listeners will be skipped during serialization.
*
* @see VetoableChangeSupport
*/
public class PropertyChangeSupport implements Serializable {
    // Thread-safe registry mapping property name -> listener array; the null
    // key holds listeners registered for all properties.
    private PropertyChangeListenerMap map = new PropertyChangeListenerMap();
    /**
     * Constructs a <code>PropertyChangeSupport</code> object.
     *
     * @param sourceBean The bean to be given as the source for any events.
     */
    public PropertyChangeSupport(Object sourceBean) {
        if (sourceBean == null) {
            throw new NullPointerException();
        }
        source = sourceBean;
    }
    /**
     * Add a PropertyChangeListener to the listener list.
     * The listener is registered for all properties.
     * The same listener object may be added more than once, and will be called
     * as many times as it is added.
     * If <code>listener</code> is null, no exception is thrown and no action
     * is taken.
     *
     * @param listener The PropertyChangeListener to be added
     */
    public void addPropertyChangeListener(PropertyChangeListener listener) {
        if (listener == null) {
            return;
        }
        if (listener instanceof PropertyChangeListenerProxy) {
            PropertyChangeListenerProxy proxy =
                    (PropertyChangeListenerProxy)listener;
            // Call two argument add method.
            addPropertyChangeListener(proxy.getPropertyName(),
                                      proxy.getListener());
        } else {
            this.map.add(null, listener);
        }
    }
    /**
     * Remove a PropertyChangeListener from the listener list.
     * This removes a PropertyChangeListener that was registered
     * for all properties.
     * If <code>listener</code> was added more than once to the same event
     * source, it will be notified one less time after being removed.
     * If <code>listener</code> is null, or was never added, no exception is
     * thrown and no action is taken.
     *
     * @param listener The PropertyChangeListener to be removed
     */
    public void removePropertyChangeListener(PropertyChangeListener listener) {
        if (listener == null) {
            return;
        }
        if (listener instanceof PropertyChangeListenerProxy) {
            PropertyChangeListenerProxy proxy =
                    (PropertyChangeListenerProxy)listener;
            // Call two argument remove method.
            removePropertyChangeListener(proxy.getPropertyName(),
                                         proxy.getListener());
        } else {
            this.map.remove(null, listener);
        }
    }
    /**
     * Returns an array of all the listeners that were added to the
     * PropertyChangeSupport object with addPropertyChangeListener().
     * <p>
     * If some listeners have been added with a named property, then
     * the returned array will be a mixture of PropertyChangeListeners
     * and <code>PropertyChangeListenerProxy</code>s. If the calling
     * method is interested in distinguishing the listeners then it must
     * test each element to see if it's a
     * <code>PropertyChangeListenerProxy</code>, perform the cast, and examine
     * the parameter.
     *
     * <pre>{@code
     * PropertyChangeListener[] listeners = bean.getPropertyChangeListeners();
     * for (int i = 0; i < listeners.length; i++) {
     *   if (listeners[i] instanceof PropertyChangeListenerProxy) {
     *     PropertyChangeListenerProxy proxy =
     *                    (PropertyChangeListenerProxy)listeners[i];
     *     if (proxy.getPropertyName().equals("foo")) {
     *       // proxy is a PropertyChangeListener which was associated
     *       // with the property named "foo"
     *     }
     *   }
     * }
     * }</pre>
     *
     * @see PropertyChangeListenerProxy
     * @return all of the <code>PropertyChangeListeners</code> added or an
     *         empty array if no listeners have been added
     * @since 1.4
     */
    public PropertyChangeListener[] getPropertyChangeListeners() {
        return this.map.getListeners();
    }
    /**
     * Add a PropertyChangeListener for a specific property.  The listener
     * will be invoked only when a call on firePropertyChange names that
     * specific property.
     * The same listener object may be added more than once.  For each
     * property,  the listener will be invoked the number of times it was added
     * for that property.
     * If <code>propertyName</code> or <code>listener</code> is null, no
     * exception is thrown and no action is taken.
     *
     * @param propertyName  The name of the property to listen on.
     * @param listener  The PropertyChangeListener to be added
     */
    public void addPropertyChangeListener(
                String propertyName,
                PropertyChangeListener listener) {
        if (listener == null || propertyName == null) {
            return;
        }
        // Unwrap any proxy chain so the raw listener is stored under the name.
        listener = this.map.extract(listener);
        if (listener != null) {
            this.map.add(propertyName, listener);
        }
    }
    /**
     * Remove a PropertyChangeListener for a specific property.
     * If <code>listener</code> was added more than once to the same event
     * source for the specified property, it will be notified one less time
     * after being removed.
     * If <code>propertyName</code> is null,  no exception is thrown and no
     * action is taken.
     * If <code>listener</code> is null, or was never added for the specified
     * property, no exception is thrown and no action is taken.
     *
     * @param propertyName  The name of the property that was listened on.
     * @param listener  The PropertyChangeListener to be removed
     */
    public void removePropertyChangeListener(
                String propertyName,
                PropertyChangeListener listener) {
        if (listener == null || propertyName == null) {
            return;
        }
        // Unwrap any proxy chain to match how the listener was stored.
        listener = this.map.extract(listener);
        if (listener != null) {
            this.map.remove(propertyName, listener);
        }
    }
    /**
     * Returns an array of all the listeners which have been associated
     * with the named property.
     *
     * @param propertyName  The name of the property being listened to
     * @return all of the <code>PropertyChangeListeners</code> associated with
     *         the named property.  If no such listeners have been added,
     *         or if <code>propertyName</code> is null, an empty array is
     *         returned.
     * @since 1.4
     */
    public PropertyChangeListener[] getPropertyChangeListeners(String propertyName) {
        return this.map.getListeners(propertyName);
    }
    /**
     * Reports a bound property update to listeners
     * that have been registered to track updates of
     * all properties or a property with the specified name.
     * <p>
     * No event is fired if old and new values are equal and non-null.
     * <p>
     * This is merely a convenience wrapper around the more general
     * {@link #firePropertyChange(PropertyChangeEvent)} method.
     *
     * @param propertyName  the programmatic name of the property that was changed
     * @param oldValue      the old value of the property
     * @param newValue      the new value of the property
     */
    public void firePropertyChange(String propertyName, Object oldValue, Object newValue) {
        if (oldValue == null || newValue == null || !oldValue.equals(newValue)) {
            firePropertyChange(new PropertyChangeEvent(this.source, propertyName, oldValue, newValue));
        }
    }
    /**
     * Reports an integer bound property update to listeners
     * that have been registered to track updates of
     * all properties or a property with the specified name.
     * <p>
     * No event is fired if old and new values are equal.
     * <p>
     * This is merely a convenience wrapper around the more general
     * {@link #firePropertyChange(String, Object, Object)}  method.
     *
     * @param propertyName  the programmatic name of the property that was changed
     * @param oldValue      the old value of the property
     * @param newValue      the new value of the property
     */
    public void firePropertyChange(String propertyName, int oldValue, int newValue) {
        if (oldValue != newValue) {
            firePropertyChange(propertyName, Integer.valueOf(oldValue), Integer.valueOf(newValue));
        }
    }
    /**
     * Reports a boolean bound property update to listeners
     * that have been registered to track updates of
     * all properties or a property with the specified name.
     * <p>
     * No event is fired if old and new values are equal.
     * <p>
     * This is merely a convenience wrapper around the more general
     * {@link #firePropertyChange(String, Object, Object)}  method.
     *
     * @param propertyName  the programmatic name of the property that was changed
     * @param oldValue      the old value of the property
     * @param newValue      the new value of the property
     */
    public void firePropertyChange(String propertyName, boolean oldValue, boolean newValue) {
        if (oldValue != newValue) {
            firePropertyChange(propertyName, Boolean.valueOf(oldValue), Boolean.valueOf(newValue));
        }
    }
    /**
     * Fires a property change event to listeners
     * that have been registered to track updates of
     * all properties or a property with the specified name.
     * <p>
     * No event is fired if the given event's old and new values are equal and non-null.
     *
     * @param event  the {@code PropertyChangeEvent} to be fired
     */
    public void firePropertyChange(PropertyChangeEvent event) {
        Object oldValue = event.getOldValue();
        Object newValue = event.getNewValue();
        if (oldValue == null || newValue == null || !oldValue.equals(newValue)) {
            String name = event.getPropertyName();
            // Listeners for all properties fire first, then named listeners.
            PropertyChangeListener[] common = this.map.get(null);
            PropertyChangeListener[] named = (name != null)
                        ? this.map.get(name)
                        : null;
            fire(common, event);
            fire(named, event);
        }
    }
    // Dispatches the event to each listener in the array; tolerates null.
    private static void fire(PropertyChangeListener[] listeners, PropertyChangeEvent event) {
        if (listeners != null) {
            for (PropertyChangeListener listener : listeners) {
                listener.propertyChange(event);
            }
        }
    }
    /**
     * Reports a bound indexed property update to listeners
     * that have been registered to track updates of
     * all properties or a property with the specified name.
     * <p>
     * No event is fired if old and new values are equal and non-null.
     * <p>
     * This is merely a convenience wrapper around the more general
     * {@link #firePropertyChange(PropertyChangeEvent)} method.
     *
     * @param propertyName the programmatic name of the property that was changed
     * @param index        the index of the property element that was changed
     * @param oldValue     the old value of the property
     * @param newValue     the new value of the property
     * @since 1.5
     */
    public void fireIndexedPropertyChange(String propertyName, int index, Object oldValue, Object newValue) {
        if (oldValue == null || newValue == null || !oldValue.equals(newValue)) {
            firePropertyChange(new IndexedPropertyChangeEvent(source, propertyName, oldValue, newValue, index));
        }
    }
    /**
     * Reports an integer bound indexed property update to listeners
     * that have been registered to track updates of
     * all properties or a property with the specified name.
     * <p>
     * No event is fired if old and new values are equal.
     * <p>
     * This is merely a convenience wrapper around the more general
     * {@link #fireIndexedPropertyChange(String, int, Object, Object)} method.
     *
     * @param propertyName the programmatic name of the property that was changed
     * @param index        the index of the property element that was changed
     * @param oldValue     the old value of the property
     * @param newValue     the new value of the property
     * @since 1.5
     */
    public void fireIndexedPropertyChange(String propertyName, int index, int oldValue, int newValue) {
        if (oldValue != newValue) {
            fireIndexedPropertyChange(propertyName, index, Integer.valueOf(oldValue), Integer.valueOf(newValue));
        }
    }
    /**
     * Reports a boolean bound indexed property update to listeners
     * that have been registered to track updates of
     * all properties or a property with the specified name.
     * <p>
     * No event is fired if old and new values are equal.
     * <p>
     * This is merely a convenience wrapper around the more general
     * {@link #fireIndexedPropertyChange(String, int, Object, Object)} method.
     *
     * @param propertyName the programmatic name of the property that was changed
     * @param index        the index of the property element that was changed
     * @param oldValue     the old value of the property
     * @param newValue     the new value of the property
     * @since 1.5
     */
    public void fireIndexedPropertyChange(String propertyName, int index, boolean oldValue, boolean newValue) {
        if (oldValue != newValue) {
            fireIndexedPropertyChange(propertyName, index, Boolean.valueOf(oldValue), Boolean.valueOf(newValue));
        }
    }
    /**
     * Check if there are any listeners for a specific property, including
     * those registered on all properties.  If <code>propertyName</code>
     * is null, only check for listeners registered on all properties.
     *
     * @param propertyName  the property name.
     * @return true if there are one or more listeners for the given property
     */
    public boolean hasListeners(String propertyName) {
        return this.map.hasListeners(propertyName);
    }
    /**
     * @serialData Null terminated list of <code>PropertyChangeListeners</code>.
     * <p>
     * At serialization time we skip non-serializable listeners and
     * only serialize the serializable listeners.
     */
    private void writeObject(ObjectOutputStream s) throws IOException {
        Hashtable<String, PropertyChangeSupport> children = null;
        PropertyChangeListener[] listeners = null;
        // Snapshot the map under its lock so listeners added concurrently
        // cannot corrupt the serialized form.
        synchronized (this.map) {
            for (Entry<String, PropertyChangeListener[]> entry : this.map.getEntries()) {
                String property = entry.getKey();
                if (property == null) {
                    listeners = entry.getValue();
                } else {
                    if (children == null) {
                        children = new Hashtable<>();
                    }
                    // Named listeners are serialized as one nested
                    // PropertyChangeSupport per property (legacy format).
                    PropertyChangeSupport pcs = new PropertyChangeSupport(this.source);
                    pcs.map.set(null, entry.getValue());
                    children.put(property, pcs);
                }
            }
        }
        ObjectOutputStream.PutField fields = s.putFields();
        fields.put("children", children);
        fields.put("source", this.source);
        fields.put("propertyChangeSupportSerializedDataVersion", 2);
        s.writeFields();
        if (listeners != null) {
            for (PropertyChangeListener l : listeners) {
                // Non-serializable listeners are silently skipped, per the
                // documented contract of this class.
                if (l instanceof Serializable) {
                    s.writeObject(l);
                }
            }
        }
        // Null marks the end of the listener list in the stream.
        s.writeObject(null);
    }
    private void readObject(ObjectInputStream s) throws ClassNotFoundException, IOException {
        // The map field is not part of the serialized form; rebuild it here.
        this.map = new PropertyChangeListenerMap();
        ObjectInputStream.GetField fields = s.readFields();
        @SuppressWarnings("unchecked")
        Hashtable<String, PropertyChangeSupport> children = (Hashtable<String, PropertyChangeSupport>) fields.get("children", null);
        this.source = fields.get("source", null);
        fields.get("propertyChangeSupportSerializedDataVersion", 2);
        Object listenerOrNull;
        // Read listeners for all properties until the null terminator.
        while (null != (listenerOrNull = s.readObject())) {
            this.map.add(null, (PropertyChangeListener)listenerOrNull);
        }
        if (children != null) {
            // Flatten the nested per-property supports back into this map.
            for (Entry<String, PropertyChangeSupport> entry : children.entrySet()) {
                for (PropertyChangeListener listener : entry.getValue().getPropertyChangeListeners()) {
                    this.map.add(entry.getKey(), listener);
                }
            }
        }
    }
    /**
     * The object to be provided as the "source" for any generated events.
     */
    private Object source;
    /**
     * @serialField children                                   Hashtable
     * @serialField source                                     Object
     * @serialField propertyChangeSupportSerializedDataVersion int
     */
    private static final ObjectStreamField[] serialPersistentFields = {
            new ObjectStreamField("children", Hashtable.class),
            new ObjectStreamField("source", Object.class),
            new ObjectStreamField("propertyChangeSupportSerializedDataVersion", Integer.TYPE)
    };
    /**
     * Serialization version ID, so we're compatible with JDK 1.1
     */
    static final long serialVersionUID = 6401253773779951803L;
    /**
     * This is a {@link ChangeListenerMap ChangeListenerMap} implementation
     * that works with {@link PropertyChangeListener PropertyChangeListener} objects.
     */
    private static final class PropertyChangeListenerMap extends ChangeListenerMap<PropertyChangeListener> {
        private static final PropertyChangeListener[] EMPTY = {};
        /**
         * Creates an array of {@link PropertyChangeListener PropertyChangeListener} objects.
         * This method uses the same instance of the empty array
         * when {@code length} equals {@code 0}.
         *
         * @param length  the array length
         * @return        an array with specified length
         */
        @Override
        protected PropertyChangeListener[] newArray(int length) {
            return (0 < length)
                    ? new PropertyChangeListener[length]
                    : EMPTY;
        }
        /**
         * Creates a {@link PropertyChangeListenerProxy PropertyChangeListenerProxy}
         * object for the specified property.
         *
         * @param name      the name of the property to listen on
         * @param listener  the listener to process events
         * @return          a {@code PropertyChangeListenerProxy} object
         */
        @Override
        protected PropertyChangeListener newProxy(String name, PropertyChangeListener listener) {
            return new PropertyChangeListenerProxy(name, listener);
        }
        /**
         * {@inheritDoc}
         */
        public final PropertyChangeListener extract(PropertyChangeListener listener) {
            // Unwrap nested proxies down to the underlying listener.
            while (listener instanceof PropertyChangeListenerProxy) {
                listener = ((PropertyChangeListenerProxy) listener).getListener();
            }
            return listener;
        }
    }
}
| |
/*
* Copyright 2014 Lukas Stratmann
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
*
*/
package data;
import java.sql.Connection;
import java.sql.Date;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.text.ParseException;
import java.util.LinkedList;
import java.util.List;
import java.util.NoSuchElementException;
import javafx.beans.property.SimpleStringProperty;
import javafx.beans.property.StringProperty;
import util.DateComparator;
/**
* @author lumpiluk
*
*/
/**
 * A person record backed by the "{@value #SQL_TABLE_NAME}" table of the hotel
 * database. Text columns are exposed as JavaFX {@link StringProperty} objects
 * so UI controls can bind to them directly.
 *
 * <p>An id of {@code 0} means "not yet persisted"; see {@link #isIdSet()}.</p>
 *
 * @author lumpiluk
 */
public class Person extends HotelData implements Cloneable {

    public static final String SQL_TABLE_NAME = "people";

    /** Creates the table if it does not exist yet. */
    private static final String SQL_CREATE = "CREATE TABLE IF NOT EXISTS "
            + SQL_TABLE_NAME + " (person_id INTEGER PRIMARY KEY, "
            + "address INTEGER, "
            + "title TEXT, "
            + "first_names TEXT, "
            + "surnames TEXT NOT NULL, "
            + "birthday TEXT, "
            + "food_memo TEXT)";

    private static final String SQL_INSERT = "INSERT INTO " + SQL_TABLE_NAME
            + " (address, title, first_names, surnames, birthday, food_memo) "
            + "VALUES (?, ?, ?, ?, ?, ?)";

    private static final String SQL_UPDATE = "UPDATE " + SQL_TABLE_NAME
            + " SET address = ?, title = ?, first_names = ?, surnames = ?, "
            + "birthday = ?, food_memo = ? "
            + "WHERE person_id = ?";

    private static final String SQL_DELETE = "DELETE FROM " + SQL_TABLE_NAME
            + " WHERE person_id = ?";

    private static final String SQL_SELECT = "SELECT * FROM " + SQL_TABLE_NAME
            + " WHERE person_id = ?";

    // 0 == "not yet persisted"; database ids are always > 0.
    private long id;

    private StringProperty title = new SimpleStringProperty();
    private StringProperty firstNames = new SimpleStringProperty();
    private StringProperty surnames = new SimpleStringProperty();
    // May be null — the "birthday" column is nullable.
    private Date birthday;
    private StringProperty foodMemo = new SimpleStringProperty();
    // May be null; only the id is persisted (column "address").
    private Address address;

    /**
     * Creates a new, not yet persisted person.
     * @param con open database connection used by all DB operations
     */
    public Person(final Connection con) {
        super(con);
        this.setId(0); // int literal → private setId(int): marks the id as unset
    }

    /**
     * @return the id
     */
    public long getId() {
        return id;
    }

    /**
     * @param id the new ID
     * @throws IllegalArgumentException if id is less than or equal to 0
     */
    public void setId(long id) throws IllegalArgumentException {
        if (id <= 0)
            throw new IllegalArgumentException();
        this.id = id;
    }

    /**
     * Check whether id has been initialized.<br />
     * Used in updateInsert().
     * @return false iff the id has not been set yet.
     */
    public boolean isIdSet() {
        // Fixed: previously returned (id == 0), the inverse of the documented
        // contract, which made updateInsert() UPDATE unsaved people and
        // INSERT already-persisted ones.
        return id != 0;
    }

    /**
     * Loads the associated address from the database.
     * @return the address associated with this Person. May be null.
     * @throws SQLException
     */
    public Address getAddress() throws SQLException {
        return this.address;
    }

    /** @return the address id, 0 if not set */
    private long getAddressId() {
        if (this.address == null) {
            return 0;
        }
        return this.address.getId();
    }

    /**
     * Creates an empty address for this person object with only the id set,
     * or clears the address when the id is 0 (SQL NULL).
     * @param addressId the address id to set
     */
    private void setAddressId(long addressId) {
        // Fixed: the created Address was previously discarded instead of being
        // stored in this.address, so the loaded address id was lost.
        if (addressId == 0) {
            this.address = null;
            return;
        }
        Address a = new Address(con);
        a.setId(addressId);
        this.address = a;
    }

    /** @param address the address to set (will only save the id) */
    public void setAddress(Address address) {
        this.address = address;
    }

    /** @param id the id to set; bypasses the range check of setId(long) */
    private void setId(int id) { this.id = id; }

    /** @return the title */
    public String getTitle() { return title.get(); }

    /** @param title the title to set */
    public void setTitle(String title) { this.title.set(title); }

    public StringProperty titleProperty() { return title; }

    /** @return the firstNames */
    public String getFirstNames() { return firstNames.get(); }

    /** @param firstNames the firstNames to set */
    public void setFirstNames(String firstNames) { this.firstNames.set(firstNames); }

    public StringProperty firstNamesProperty() { return firstNames; }

    /** @return the surnames */
    public String getSurnames() { return surnames.get(); }

    /** @param surnames the surnames to set */
    public void setSurnames(String surnames) { this.surnames.set(surnames); }

    public StringProperty surnamesProperty() { return surnames; }

    /** @return the birthday, may be null */
    public Date getBirthday() { return birthday; }

    /** @param birthday the birthday to set, may be null */
    public void setBirthday(Date birthday) { this.birthday = birthday; }

    /**
     * Parses an ISO date string (format from DateComparator.getDateFormat()).
     * A null or blank string clears the birthday.
     * @param isoDateCreated textual date, may be null or blank
     * @throws ParseException if the string is non-blank but unparseable
     */
    public void setBirthday(String isoDateCreated) throws ParseException {
        if (isoDateCreated != null && !isoDateCreated.trim().equals("")) {
            this.birthday = new Date(DateComparator.getDateFormat()
                    .parse(isoDateCreated).getTime());
        } else {
            this.birthday = null;
        }
    }

    /** @return the foodMemo */
    public String getFoodMemo() { return foodMemo.get(); }

    /** @param foodMemo the foodMemo to set */
    public void setFoodMemo(String foodMemo) { this.foodMemo.set(foodMemo); }

    public StringProperty foodMemoProperty() { return foodMemo; }

    @Override
    public String toString() {
        return String.format("%1$s %2$s %3$s", getTitle(), getFirstNames(), getSurnames());
    }

    @Override protected Object clone() throws CloneNotSupportedException {
        Person p = new Person(con);
        p.setAddress(this.address);
        // Fixed: guard against NPE — birthday may legitimately be null.
        p.setBirthday(this.birthday == null ? null : (Date) this.birthday.clone());
        p.setFirstNames(this.getFirstNames());
        p.setSurnames(this.getSurnames());
        p.setFoodMemo(this.getFoodMemo());
        p.setId(this.id);
        p.setTitle(this.getTitle());
        return p;
    }

    /**
     * Used in fromDbAtId and getBatch.
     * @param p The Person the data of which will be prepared.
     * @param rs The result set, positioned on the row to read.
     * @throws SQLException
     */
    private static void prepareDataFromResultSet(final Person p, final ResultSet rs)
            throws SQLException {
        p.setId(rs.getLong("person_id")); // works although private :)
        p.setAddressId(rs.getLong("address")); // 0 if SQL NULL
        try {
            p.setBirthday(rs.getString("birthday"));
        } catch (ParseException e) {
            // Unparseable stored date: treat as "no birthday" rather than fail.
            Date tmp = null; p.setBirthday(tmp);
        }
        p.setFirstNames(rs.getString("first_names"));
        p.setFoodMemo(rs.getString("food_memo"));
        p.setSurnames(rs.getString("surnames"));
        p.setTitle(rs.getString("title"));
    }

    /** Populates this person from the current row of the given result set. */
    public void prepareDataFromResultSet(final ResultSet rs)
            throws SQLException {
        Person.prepareDataFromResultSet(this, rs);
    }

    /** {@inheritDoc} */
    @Override public boolean equals(Object obj) {
        // ignore properties other than id, might've already changed as user edited the person
        return obj instanceof Person
                && ((Person) obj).getId() == this.getId();
    }

    /** {@inheritDoc} */
    @Override public int hashCode() {
        // Added to honor the equals/hashCode contract: equality is id-based.
        return (int) (id ^ (id >>> 32));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public boolean fromDbAtId(final long d) throws NoSuchElementException,
            SQLException {
        boolean success = false;
        try (PreparedStatement stmt = con.prepareStatement(SQL_SELECT)) {
            // Fixed: bind the requested id "d" — previously this bound
            // this.getId(), so the parameter was silently ignored.
            stmt.setLong(1, d);
            try (ResultSet rs = stmt.executeQuery()) {
                if (!rs.first()) {
                    throw new NoSuchElementException();
                }
                prepareDataFromResultSet(this, rs);
                success = true;
            }
        }
        return success;
    }

    /**
     * {@inheritDoc}
     * @throws SQLException
     */
    @Override
    public Iterable<Person> getBatch(List<Long> indices) throws SQLException {
        if (indices == null)
            throw new IllegalArgumentException();
        List<Person> resultList = new LinkedList<Person>();
        if (!indices.isEmpty()) {
            // Build "... IN (?,?,...,?)" with one placeholder per index.
            StringBuilder sql = new StringBuilder();
            sql.append("SELECT * FROM " + SQL_TABLE_NAME + " WHERE person_id IN (");
            for (@SuppressWarnings("unused") Long id : indices) {
                sql.append("?,");
            }
            sql.deleteCharAt(sql.length() - 1); // remove last comma
            sql.append(")");
            try (PreparedStatement stmt = con.prepareStatement(
                    sql.toString())) {
                int i = 0;
                for (Long id : indices) {
                    i++;
                    stmt.setLong(i, id);
                }
                try (ResultSet rs = stmt.executeQuery()) {
                    while (rs.next()) {
                        Person p = new Person(con);
                        prepareDataFromResultSet(p, rs);
                        resultList.add(p);
                    }
                }
            }
        }
        return resultList;
    }

    /**
     * {@inheritDoc}
     * Updates the row identified by this person's id.
     * @throws SQLException
     */
    @Override public void updateDb() throws SQLException {
        try (PreparedStatement stmt = con.prepareStatement(SQL_UPDATE)) {
            if (getAddressId() == 0) {
                stmt.setNull(1, java.sql.Types.INTEGER);
            } else {
                stmt.setLong(1, getAddressId());
            }
            stmt.setString(2, getTitle());
            stmt.setString(3, getFirstNames());
            stmt.setString(4, getSurnames());
            if (getBirthday() != null) {
                stmt.setString(5, DateComparator.getDateFormat().format(getBirthday()));
            } else {
                stmt.setNull(5, java.sql.Types.NULL);
            }
            stmt.setString(6, getFoodMemo());
            stmt.setLong(7, getId());
            stmt.executeUpdate();
        }
    }

    /**
     * {@inheritDoc}
     * Inserts this person and adopts the database-assigned id.
     * @throws SQLException
     */
    @Override public void insertIntoDb() throws SQLException {
        try (PreparedStatement stmt = con.prepareStatement(SQL_INSERT,
                Statement.RETURN_GENERATED_KEYS)) {
            if (getAddressId() == 0) {
                stmt.setNull(1, java.sql.Types.INTEGER);
            } else {
                stmt.setLong(1, getAddressId());
            }
            stmt.setString(2, getTitle());
            stmt.setString(3, getFirstNames());
            stmt.setString(4, getSurnames());
            if (getBirthday() != null) {
                stmt.setString(5, DateComparator.getDateFormat().format(getBirthday()));
            } else {
                stmt.setNull(5, java.sql.Types.NULL);
            }
            stmt.setString(6, getFoodMemo());
            stmt.executeUpdate();
            // Fixed: the generated-keys ResultSet starts BEFORE the first row;
            // reading without next() throws SQLException on compliant drivers.
            try (ResultSet keys = stmt.getGeneratedKeys()) {
                if (keys.next()) {
                    this.setId(keys.getLong(1)); // get newly assigned id
                }
            }
        }
    }

    /**
     * If isIdSet() returns true, this person will be updated, otherwise it
     * will be inserted as a new person into the database.
     * @throws SQLException
     */
    public void updateInsert() throws SQLException {
        if (isIdSet()) {
            updateDb();
        } else {
            insertIntoDb();
        }
    }

    @Override public void deleteFromDb() throws SQLException {
        try (PreparedStatement stmt = con.prepareStatement(SQL_DELETE)) {
            stmt.setLong(1, getId());
            stmt.executeUpdate();
        }
    }

    @Override public void createTables() throws SQLException {
        try (Statement stmt = con.createStatement()) {
            stmt.executeUpdate(SQL_CREATE);
        }
    }

    @Override public String[] getPropertyIdentifiers() {
        String[] idents = { "title", "firstNames", "surnames", "foodMemo" };
        return idents;
    }
}
| |
/*
* Jitsi, the OpenSource Java VoIP and Instant Messaging client.
*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jitsi.android.gui.call;
import net.java.sip.communicator.service.protocol.event.*;
import net.java.sip.communicator.util.*;
import org.jitsi.*;
import org.jitsi.service.osgi.*;
import android.content.*;
import android.graphics.*;
import android.os.*;
import android.view.*;
import android.widget.*;
import net.java.sip.communicator.service.protocol.*;
import net.java.sip.communicator.util.call.*;
/**
* The <tt>ReceivedCallActivity</tt> is the activity that corresponds to the
* screen shown on incoming call.
*
* @author Yana Stamcheva
* @author Pawel Domas
*/
/**
 * The <tt>ReceivedCallActivity</tt> is the activity that corresponds to the
 * screen shown on incoming call.
 *
 * @author Yana Stamcheva
 * @author Pawel Domas
 */
public class ReceivedCallActivity
    extends OSGiActivity
    implements CallChangeListener
{
    /**
     * The logger
     */
    private final static Logger logger =
        Logger.getLogger(ReceivedCallActivity.class);

    /**
     * The identifier of the call.
     */
    private String callIdentifier;

    /**
     * The corresponding call. May be null if the activity was started without
     * a matching active call (it then finishes itself in onCreate).
     */
    private Call call;

    /**
     * {@inheritDoc}
     *
     * Ensures the incoming-call screen shows on top of the keyguard with the
     * display turned on and kept on.
     */
    @Override
    public void onAttachedToWindow()
    {
        super.onAttachedToWindow();
        // Fixed: window flags are a bitmask and must be combined with "|".
        // Arithmetic "+" only happens to work while every flag is distinct and
        // listed once; it silently corrupts the mask otherwise.
        getWindow().addFlags(
            WindowManager.LayoutParams.FLAG_TURN_SCREEN_ON
                | WindowManager.LayoutParams.FLAG_SHOW_WHEN_LOCKED
                | WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON
                | WindowManager.LayoutParams.FLAG_DISMISS_KEYGUARD);
    }

    /**
     * Called when the activity is starting. Initializes the call identifier.
     *
     * @param savedInstanceState If the activity is being re-initialized after
     * previously being shut down then this Bundle contains the data it most
     * recently supplied in onSaveInstanceState(Bundle).
     * Note: Otherwise it is null.
     */
    @Override
    protected void onCreate(Bundle savedInstanceState)
    {
        super.onCreate(savedInstanceState);

        setContentView(R.layout.received_call);

        TextView displayNameView
            = (TextView) findViewById(R.id.calleeDisplayName);
        TextView addressView
            = (TextView) findViewById(R.id.calleeAddress);
        ImageView avatarView
            = (ImageView) findViewById(R.id.calleeAvatar);

        Bundle extras = getIntent().getExtras();
        if (extras == null)
        {
            // Fixed: guard against an intent without extras instead of
            // crashing with a NullPointerException below.
            logger.error("Started without call extras");
            finish();
            return;
        }

        displayNameView.setText(
            extras.getString(CallManager.CALLEE_DISPLAY_NAME));
        addressView.setText(extras.getString(CallManager.CALLEE_ADDRESS));

        byte[] avatar = extras.getByteArray(CallManager.CALLEE_AVATAR);
        if (avatar != null)
        {
            Bitmap bitmap
                = BitmapFactory.decodeByteArray(avatar, 0, avatar.length);
            avatarView.setImageBitmap(bitmap);
        }

        callIdentifier = extras.getString(CallManager.CALL_IDENTIFIER);
        call = CallManager.getActiveCall(callIdentifier);
        if(call == null)
        {
            logger.error("There is no call with ID: "+callIdentifier);
            finish();
            return;
        }

        ImageView hangupView = (ImageView) findViewById(R.id.hangupButton);
        hangupView.setOnClickListener(new View.OnClickListener()
        {
            public void onClick(View v)
            {
                hangupCall();
            }
        });

        final ImageView callButton = (ImageView) findViewById(R.id.callButton);
        callButton.setOnClickListener(new View.OnClickListener()
        {
            public void onClick(View v)
            {
                answerCall(call, false);
            }
        });
    }

    /**
     * Method mapped to answer button's onClick event
     *
     * @param v the answer with video button's <tt>View</tt>
     */
    public void onAnswerWithVideoClicked(View v)
    {
        if(call != null)
        {
            logger.trace("Answer call with video");
            answerCall(call, true);
        }
    }

    /**
     * Answers the given call and launches the call user interface.
     *
     * @param call the call to answer
     * @param useVideo indicates if video shall be used
     */
    private void answerCall(final Call call, boolean useVideo)
    {
        CallManager.answerCall(call, useVideo);

        runOnUiThread(new Runnable()
        {
            public void run()
            {
                Intent videoCall
                    = VideoCallActivity
                        .createVideoCallIntent(
                            ReceivedCallActivity.this,
                            callIdentifier);
                startActivity(videoCall);
                finish();
            }
        });
    }

    /**
     * {@inheritDoc}
     *
     * Finishes immediately if the call has already ended; otherwise starts
     * tracking call-state changes.
     */
    @Override
    protected void onResume()
    {
        super.onResume();
        // Fixed: call may be null when onCreate() bailed out early; guard
        // before dereferencing.
        if(call == null || call.getCallState().equals(CallState.CALL_ENDED))
        {
            finish();
        }
        else
        {
            call.addCallChangeListener(this);
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    protected void onPause()
    {
        if(call != null)
        {
            call.removeCallChangeListener(this);
        }
        super.onPause();
    }

    /**
     * Hangs up the call and finishes this <tt>Activity</tt>.
     */
    private void hangupCall()
    {
        CallManager.hangupCall(call);
        finish();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public boolean onKeyUp(int keyCode, KeyEvent event)
    {
        if(keyCode == KeyEvent.KEYCODE_BACK)
        {
            // Hangs up the call when back is pressed as this Activity will be
            // not displayed again.
            hangupCall();
            return true;
        }
        return super.onKeyUp(keyCode, event);
    }

    /**
     * Indicates that a new call peer has joined the source call.
     *
     * @param evt the <tt>CallPeerEvent</tt> containing the source call
     * and call peer.
     */
    public void callPeerAdded(CallPeerEvent evt)
    {
    }

    /**
     * Indicates that a call peer has left the source call.
     *
     * @param evt the <tt>CallPeerEvent</tt> containing the source call
     * and call peer.
     */
    public void callPeerRemoved(CallPeerEvent evt)
    {
    }

    /**
     * Indicates that a change has occurred in the state of the source call.
     *
     * @param evt the <tt>CallChangeEvent</tt> instance containing the source
     * calls and its old and new state.
     */
    public void callStateChanged(CallChangeEvent evt)
    {
        if(evt.getNewValue().equals(CallState.CALL_ENDED))
        {
            finish();
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.resourcemanager.monitor.capacity;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.common.fica.FiCaSchedulerNode;
import org.apache.hadoop.yarn.util.resource.Resources;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
public class ReservedContainerCandidatesSelector
    extends PreemptionCandidatesSelector {
  private static final Log LOG =
      LogFactory.getLog(ReservedContainerCandidatesSelector.class);

  // Computes each queue's ideal allocation / preemptable amount before
  // candidates are selected.
  private PreemptableResourceCalculator preemptableAmountCalculator;

  /**
   * A temporary data structure to remember what to preempt on a node
   */
  private static class NodeForPreemption {
    // Lower cost is preferred; nodes are sorted ascending by this value in
    // getNodesForPreemption().
    private float preemptionCost;
    private FiCaSchedulerNode schedulerNode;
    private List<RMContainer> selectedContainers;

    public NodeForPreemption(float preemptionCost,
        FiCaSchedulerNode schedulerNode, List<RMContainer> selectedContainers) {
      this.preemptionCost = preemptionCost;
      this.schedulerNode = schedulerNode;
      this.selectedContainers = selectedContainers;
    }
  }

  ReservedContainerCandidatesSelector(
      CapacitySchedulerPreemptionContext preemptionContext) {
    super(preemptionContext);
    preemptableAmountCalculator = new PreemptableResourceCalculator(
        preemptionContext, true);
  }

  /**
   * Selects containers to preempt so that nodes with reserved containers can
   * satisfy their reservations.
   *
   * @param selectedCandidates previously selected candidates; this map is
   *        extended in place and also returned
   * @param clusterResource total cluster resource
   * @param totalPreemptedResourceAllowed overall preemption budget; decremented
   *        as candidates are chosen
   * @return the (mutated) selectedCandidates map
   */
  @Override
  public Map<ApplicationAttemptId, Set<RMContainer>> selectCandidates(
      Map<ApplicationAttemptId, Set<RMContainer>> selectedCandidates,
      Resource clusterResource,
      Resource totalPreemptedResourceAllowed) {
    // Calculate how much resources we need to preempt
    preemptableAmountCalculator.computeIdealAllocation(clusterResource,
        totalPreemptedResourceAllowed);

    // Get queue to preemptable resource by partition
    Map<String, Map<String, Resource>> queueToPreemptableResourceByPartition =
        new HashMap<>();
    for (String leafQueue : preemptionContext.getLeafQueueNames()) {
      queueToPreemptableResourceByPartition.put(leafQueue,
          CapacitySchedulerPreemptionUtils
              .getResToObtainByPartitionForLeafQueue(preemptionContext,
                  leafQueue, clusterResource));
    }

    // Get list of nodes for preemption, ordered by preemption cost.
    // NOTE: this first pass runs getPreemptionCandidatesOnNode() read-only so
    // that quota maps are not charged twice.
    List<NodeForPreemption> nodesForPreemption = getNodesForPreemption(
        queueToPreemptableResourceByPartition, selectedCandidates,
        totalPreemptedResourceAllowed);

    for (NodeForPreemption nfp : nodesForPreemption) {
      RMContainer reservedContainer = nfp.schedulerNode.getReservedContainer();
      if (null == reservedContainer) {
        continue;
      }

      // Second pass (readOnly=false): actually charge the quotas and commit
      // the selection for this node.
      NodeForPreemption preemptionResult = getPreemptionCandidatesOnNode(
          nfp.schedulerNode, queueToPreemptableResourceByPartition,
          selectedCandidates, totalPreemptedResourceAllowed, false);
      if (null != preemptionResult) {
        for (RMContainer c : preemptionResult.selectedContainers) {
          ApplicationAttemptId appId = c.getApplicationAttemptId();
          Set<RMContainer> containers = selectedCandidates.get(appId);
          if (null == containers) {
            containers = new HashSet<>();
            selectedCandidates.put(appId, containers);
          }

          containers.add(c);
          if (LOG.isDebugEnabled()) {
            LOG.debug(this.getClass().getName() + " Marked container=" + c
                .getContainerId() + " from queue=" + c.getQueueName()
                + " to be preemption candidates");
          }
        }
      }
    }

    return selectedCandidates;
  }

  /**
   * Looks up the preemptable resource for a queue/partition pair.
   *
   * @return the (mutable) preemptable Resource, or null if the queue or the
   *         partition is unknown
   */
  private Resource getPreemptableResource(String queueName,
      String partitionName,
      Map<String, Map<String, Resource>> queueToPreemptableResourceByPartition) {
    Map<String, Resource> partitionToPreemptable =
        queueToPreemptableResourceByPartition.get(queueName);
    if (null == partitionToPreemptable) {
      return null;
    }

    Resource preemptable = partitionToPreemptable.get(partitionName);
    return preemptable;
  }

  /**
   * Checks whether {@code required} can be preempted from the given
   * queue/partition within the overall budget. When {@code readOnly} is false
   * the quota and the budget are decremented as a side effect.
   *
   * @return true iff the required amount fits in both the queue's preemptable
   *         quota and the total budget
   */
  private boolean tryToPreemptFromQueue(String queueName, String partitionName,
      Map<String, Map<String, Resource>> queueToPreemptableResourceByPartition,
      Resource required, Resource totalPreemptionAllowed, boolean readOnly) {
    Resource preemptable = getPreemptableResource(queueName, partitionName,
        queueToPreemptableResourceByPartition);
    if (null == preemptable) {
      return false;
    }

    if (!Resources.fitsIn(rc, required, preemptable)) {
      return false;
    }

    if (!Resources.fitsIn(rc, required, totalPreemptionAllowed)) {
      return false;
    }

    if (!readOnly) {
      Resources.subtractFrom(preemptable, required);
      Resources.subtractFrom(totalPreemptionAllowed, required);
    }
    return true;
  }

  /**
   * Try to check if we can preempt resources for reserved container in given node
   * @param node
   * @param queueToPreemptableResourceByPartition it's a map of
   *        <queueName, <partition, preemptable-resource>>
   * @param readOnly do we want to modify preemptable resource after we selected
   *        candidates
   * @return NodeForPreemption if it's possible to preempt containers on the node
   *         to satisfy reserved resource
   */
  private NodeForPreemption getPreemptionCandidatesOnNode(
      FiCaSchedulerNode node,
      Map<String, Map<String, Resource>> queueToPreemptableResourceByPartition,
      Map<ApplicationAttemptId, Set<RMContainer>> selectedCandidates,
      Resource totalPreemptionAllowed, boolean readOnly) {
    RMContainer reservedContainer = node.getReservedContainer();
    Resource available = Resources.clone(node.getUnallocatedResource());
    Resource totalSelected = Resources.createResource(0);
    List<RMContainer> sortedRunningContainers =
        node.getCopiedListOfRunningContainers();
    List<RMContainer> selectedContainers = new ArrayList<>();
    Map<ContainerId, RMContainer> killableContainers =
        node.getKillableContainers();

    // Sort running container by launch time, we preferred to preempt recent
    // launched preempt container
    // (NOTE: ordering is by ContainerId, descending — newer ids sort first;
    // assumes newer containers have larger ids.)
    Collections.sort(sortedRunningContainers, new Comparator<RMContainer>() {
      @Override public int compare(RMContainer o1, RMContainer o2) {
        return -1 * o1.getContainerId().compareTo(o2.getContainerId());
      }
    });

    // First check: can we preempt containers to allocate the
    // reservedContainer?
    boolean canAllocateReservedContainer = false;

    // At least, we can get available + killable resources from this node
    Resource cur = Resources.add(available, node.getTotalKillableResources());
    String partition = node.getPartition();

    // Avoid preempt any container if required <= available + killable
    if (Resources.fitsIn(rc, reservedContainer.getReservedResource(), cur)) {
      return null;
    }

    // Extra cost of am container preemption
    // (currently always stays 0f because AM containers are skipped below)
    float amPreemptionCost = 0f;

    for (RMContainer c : sortedRunningContainers) {
      String containerQueueName = c.getQueueName();

      // Skip container if it is already marked killable
      if (killableContainers.containsKey(c.getContainerId())) {
        continue;
      }

      // An alternative approach is add a "penalty cost" if AM container is
      // selected. Here for safety, avoid preempt AM container in any cases
      if (c.isAMContainer()) {
        if (LOG.isDebugEnabled()) {
          LOG.debug("Skip selecting AM container on host=" + node.getNodeID()
              + " AM container=" + c.getContainerId());
        }
        continue;
      }

      // Can we preempt container c?
      // Check if we have quota to preempt this container
      boolean canPreempt = tryToPreemptFromQueue(containerQueueName, partition,
          queueToPreemptableResourceByPartition, c.getAllocatedResource(),
          totalPreemptionAllowed, readOnly);

      // If we can, add to selected container, and change resource accordingly.
      if (canPreempt) {
        if (!CapacitySchedulerPreemptionUtils.isContainerAlreadySelected(c,
            selectedCandidates)) {
          if (!readOnly) {
            selectedContainers.add(c);
          }
          Resources.addTo(totalSelected, c.getAllocatedResource());
        }
        Resources.addTo(cur, c.getAllocatedResource());
        if (Resources.fitsIn(rc,
            reservedContainer.getReservedResource(), cur)) {
          canAllocateReservedContainer = true;
          break;
        }
      }
    }

    if (!canAllocateReservedContainer) {
      if (!readOnly) {
        // Revert queue preemption quotas
        for (RMContainer c : selectedContainers) {
          Resource res = getPreemptableResource(c.getQueueName(), partition,
              queueToPreemptableResourceByPartition);
          if (null == res) {
            // This shouldn't happen in normal cases, one possible cause is
            // container moved to different queue while executing preemption logic.
            // Ignore such failures.
            continue;
          }
          Resources.addTo(res, c.getAllocatedResource());
        }
      }
      return null;
    }

    // Cost = fraction of the reservation we had to preempt for, plus the
    // (currently unused) AM-preemption penalty.
    float ratio = Resources.ratio(rc, totalSelected,
        reservedContainer.getReservedResource());

    // Compute preemption score
    NodeForPreemption nfp = new NodeForPreemption(ratio + amPreemptionCost,
        node, selectedContainers);

    return nfp;
  }

  /**
   * Collects every node that currently holds a reserved container and for
   * which a read-only dry run says the reservation could be satisfied by
   * preemption, sorted ascending by preemption cost.
   */
  private List<NodeForPreemption> getNodesForPreemption(
      Map<String, Map<String, Resource>> queueToPreemptableResourceByPartition,
      Map<ApplicationAttemptId, Set<RMContainer>> selectedCandidates,
      Resource totalPreemptionAllowed) {
    List<NodeForPreemption> nfps = new ArrayList<>();

    // get nodes have reserved container
    for (FiCaSchedulerNode node : preemptionContext.getScheduler()
        .getAllNodes()) {
      if (node.getReservedContainer() != null) {
        NodeForPreemption nfp = getPreemptionCandidatesOnNode(node,
            queueToPreemptableResourceByPartition, selectedCandidates,
            totalPreemptionAllowed, true);
        if (null != nfp) {
          // Null means we cannot preempt containers on the node to satisfy
          // reserved container
          nfps.add(nfp);
        }
      }
    }

    // Return sorted node-for-preemptions (by cost)
    Collections.sort(nfps, new Comparator<NodeForPreemption>() {
      @Override
      public int compare(NodeForPreemption o1, NodeForPreemption o2) {
        return Float.compare(o1.preemptionCost, o2.preemptionCost);
      }
    });

    return nfps;
  }
}
| |
package org.broadinstitute.hellbender.tools.picard.vcf;
import com.google.common.annotations.VisibleForTesting;
import htsjdk.samtools.SAMSequenceDictionary;
import htsjdk.samtools.util.BlockCompressedInputStream;
import htsjdk.samtools.util.BlockCompressedOutputStream;
import htsjdk.samtools.util.BlockCompressedStreamConstants;
import htsjdk.samtools.util.CloseableIterator;
import htsjdk.samtools.util.CloserUtil;
import htsjdk.samtools.util.CollectionUtil;
import htsjdk.samtools.util.IOUtil;
import htsjdk.samtools.util.PeekableIterator;
import htsjdk.samtools.util.RuntimeIOException;
import htsjdk.tribble.AbstractFeatureReader;
import htsjdk.variant.variantcontext.VariantContext;
import htsjdk.variant.variantcontext.VariantContextComparator;
import htsjdk.variant.variantcontext.writer.Options;
import htsjdk.variant.variantcontext.writer.VariantContextWriter;
import htsjdk.variant.variantcontext.writer.VariantContextWriterBuilder;
import htsjdk.variant.vcf.VCFFileReader;
import htsjdk.variant.vcf.VCFHeader;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import org.broadinstitute.hellbender.cmdline.Argument;
import org.broadinstitute.hellbender.cmdline.CommandLineProgramProperties;
import org.broadinstitute.hellbender.cmdline.PicardCommandLineProgram;
import org.broadinstitute.hellbender.cmdline.StandardArgumentDefinitions;
import org.broadinstitute.hellbender.cmdline.programgroups.VariantProgramGroup;
import org.broadinstitute.hellbender.exceptions.UserException;
import org.broadinstitute.hellbender.utils.runtime.ProgressLogger;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.EnumSet;
import java.util.List;
import java.util.SortedSet;
import java.util.TreeSet;
/**
* Simple little class that combines multiple VCFs that have exactly the same set of samples
* and totally discrete sets of loci.
*
* @author Tim Fennell
*/
@CommandLineProgramProperties(
summary = "Gathers multiple VCF files from a scatter operation into a single VCF file. Input files " +
"must be supplied in genomic order and must not have events at overlapping positions.",
oneLineSummary = "Gathers multiple VCF files from a scatter operation into a single VCF file",
programGroup = VariantProgramGroup.class
)
public final class GatherVcfs extends PicardCommandLineProgram {
    // Input VCF files; must be supplied in genomic order. Directories/lists
    // are unrolled in doWork() via IOUtil.unrollFiles().
    @Argument(shortName = StandardArgumentDefinitions.INPUT_SHORT_NAME, doc = "Input VCF file(s).")
    public List<File> INPUT;

    // The single gathered output VCF written by doWork().
    @Argument(shortName = StandardArgumentDefinitions.OUTPUT_SHORT_NAME, doc = "Output VCF file.")
    public File OUTPUT;

    private static final Logger log = LogManager.getLogger();

    /** Defaults CREATE_INDEX (declared in a superclass) to true for this tool. */
    public GatherVcfs() {
        CREATE_INDEX = true;
    }
/**
 * Validates the inputs, then gathers them either by raw gzip-block copying
 * (when every input and the output are block compressed) or by conventional
 * record-by-record copying.
 *
 * @return always null (tool communicates via the output file)
 */
@Override
protected Object doWork() {
    log.info("Checking inputs.");
    INPUT = IOUtil.unrollFiles(INPUT, IOUtil.VCF_EXTENSIONS);
    for (final File f : INPUT) {
        IOUtil.assertFileIsReadable(f);
    }
    IOUtil.assertFileIsWritable(OUTPUT);

    final SAMSequenceDictionary dict = VCFFileReader.getSequenceDictionary(INPUT.get(0));
    if (CREATE_INDEX && dict == null) {
        throw new UserException("In order to index the resulting VCF input VCFs must contain ##contig lines.");
    }

    log.info("Checking file headers and first records to ensure compatibility.");
    assertSameSamplesAndValidOrdering(INPUT);

    final boolean blockCopy =
            areAllBlockCompressed(INPUT) && areAllBlockCompressed(CollectionUtil.makeList(OUTPUT));
    if (blockCopy) {
        log.info("Gathering by copying gzip blocks. Will not be able to validate position non-overlap of files.");
        if (CREATE_INDEX) {
            log.warn("Index creation not currently supported when gathering block compressed VCFs.");
        }
        gatherWithBlockCopying(INPUT, OUTPUT);
    } else {
        log.info("Gathering by conventional means.");
        gatherConventionally(dict, CREATE_INDEX, INPUT, OUTPUT);
    }
    return null;
}
/** Checks (via filename checking) that all files appear to be block compressed files. */
@VisibleForTesting
static boolean areAllBlockCompressed(final List<File> input) {
    for (final File f : input) {
        // A file counts as block compressed only if it is non-null, is not a
        // BCF, and carries a recognized block-compressed extension.
        final boolean looksBlockCompressed = f != null
                && !f.getName().contains(".bcf")
                && AbstractFeatureReader.hasBlockCompressedExtension(f);
        if (!looksBlockCompressed) {
            return false;
        }
    }
    return true;
}
/**
 * Validates that all headers contain the same set of genotyped samples and that
 * files are in order by position of first record.
 *
 * @param inputFiles the VCFs to check, in the order they were supplied
 * @throws IllegalArgumentException if sample lists differ or files are out of order
 */
private static void assertSameSamplesAndValidOrdering(final List<File> inputFiles) {
    // Fixed: the reader used to fetch the reference header was never closed;
    // use try-with-resources for it and for every per-file reader/iterator.
    final VCFHeader header;
    try (final VCFFileReader headerReader = new VCFFileReader(inputFiles.get(0), false)) {
        header = headerReader.getFileHeader();
    }
    final SAMSequenceDictionary dict = header.getSequenceDictionary();
    final VariantContextComparator comparator = new VariantContextComparator(header.getSequenceDictionary());
    final List<String> samples = header.getGenotypeSamples();

    File lastFile = null;
    VariantContext lastContext = null;

    for (final File f : inputFiles) {
        try (final VCFFileReader in = new VCFFileReader(f, false)) {
            dict.assertSameDictionary(in.getFileHeader().getSequenceDictionary());
            final List<String> theseSamples = in.getFileHeader().getGenotypeSamples();

            if (!samples.equals(theseSamples)) {
                // Report the symmetric difference to make the mismatch actionable.
                final SortedSet<String> s1 = new TreeSet<>(samples);
                final SortedSet<String> s2 = new TreeSet<>(theseSamples);
                s1.removeAll(theseSamples);
                s2.removeAll(samples);

                throw new IllegalArgumentException("VCFs do not have identical sample lists." +
                        " Samples unique to first file: " + s1 + ". Samples unique to " + f.getAbsolutePath() + ": " + s2 + ".");
            }

            try (final CloseableIterator<VariantContext> variantIterator = in.iterator()) {
                if (variantIterator.hasNext()) {
                    final VariantContext currentContext = variantIterator.next();
                    if (lastContext != null
                            && comparator.compare(lastContext, currentContext) >= 0) {
                        throw new IllegalArgumentException("First record in file " + f.getAbsolutePath() + " is not after first record in " +
                                "previous file " + lastFile.getAbsolutePath());
                    }

                    lastContext = currentContext;
                    lastFile = f;
                }
            }
        }
    }
}
/**
 * Code for gathering multiple VCFs that works regardless of input format and
 * output format, but can be slow.
 *
 * @param sequenceDictionary dictionary used for on-the-fly index creation; may be null when createIndex is false
 * @param createIndex whether to index the output while writing
 * @param inputFiles inputs in genomic order
 * @param outputFile the gathered output VCF
 */
private static void gatherConventionally(final SAMSequenceDictionary sequenceDictionary,
                                         final boolean createIndex,
                                         final List<File> inputFiles,
                                         final File outputFile) {
    final EnumSet<Options> options = EnumSet.copyOf(VariantContextWriterBuilder.DEFAULT_OPTIONS);
    if (createIndex) options.add(Options.INDEX_ON_THE_FLY); else options.remove(Options.INDEX_ON_THE_FLY);
    try (final VariantContextWriter out = new VariantContextWriterBuilder().setOutputFile(outputFile)
            .setReferenceDictionary(sequenceDictionary).setOptions(options).build()) {

        final ProgressLogger progress = new ProgressLogger(log, 10000);
        VariantContext lastContext = null;
        File lastFile = null;
        VCFHeader firstHeader = null;
        VariantContextComparator comparator = null;

        for (final File f : inputFiles) {
            // Fixed: use a "{}" placeholder — log4j2 silently drops parameters
            // that have no matching placeholder in the message string.
            log.debug("Gathering from file: {}", f.getAbsolutePath());

            // Fixed: close the reader/iterator even when an exception is thrown
            // (CloserUtil calls at the end were skipped on the error path).
            try (final VCFFileReader variantReader = new VCFFileReader(f, false);
                 final PeekableIterator<VariantContext> variantIterator =
                         new PeekableIterator<>(variantReader.iterator())) {
                final VCFHeader header = variantReader.getFileHeader();

                // The first file's header defines the output header and the sort order.
                if (firstHeader == null) {
                    firstHeader = header;
                    out.writeHeader(firstHeader);
                    comparator = new VariantContextComparator(firstHeader.getContigLines());
                }

                // Sanity-check that this file starts strictly after the previous one ended.
                if (lastContext != null && variantIterator.hasNext()) {
                    final VariantContext vc = variantIterator.peek();
                    if (comparator.compare(vc, lastContext) <= 0) {
                        throw new IllegalStateException("First variant in file " + f.getAbsolutePath() + " is at " + vc.getSource() +
                                " but last variant in earlier file " + lastFile.getAbsolutePath() + " is at " + lastContext.getSource());
                    }
                }

                while (variantIterator.hasNext()) {
                    lastContext = variantIterator.next();
                    out.add(lastContext);
                    progress.record(lastContext.getContig(), lastContext.getStart());
                }

                lastFile = f;
            }
        }
    }
}
/**
 * Assumes that all inputs and outputs are block compressed VCF files and copies them without decompressing and parsing
 * most of the gzip blocks. Will decompress and parse blocks up to the one containing the end of the header in each file
 * (often the first block) and re-compress any data remaining in that block into a new block in the output file. Subsequent
 * blocks (excluding a terminator block if present) are copied directly from input to output.
 *
 * @param vcfs   the ordered, block-compressed input VCFs to concatenate
 * @param output the destination file; a single BGZF terminator block is appended at the end
 * @throws RuntimeIOException wrapping any underlying IOException
 */
private static void gatherWithBlockCopying(final List<File> vcfs, final File output) {
    try (final FileOutputStream out = new FileOutputStream(output)) {
        boolean isFirstFile = true;
        for (final File f : vcfs) {
            log.info("Gathering " + f.getAbsolutePath());
            try (final FileInputStream in = new FileInputStream(f)) {
                // a) It's good to check that the end of the file is valid and b) we need to know if there's a terminator block and not copy it
                final BlockCompressedInputStream.FileTermination term = BlockCompressedInputStream.checkTermination(f);
                if (term == BlockCompressedInputStream.FileTermination.DEFECTIVE)
                    throw new UserException.MalformedFile(f.getAbsolutePath() + " does not have a valid GZIP block at the end of the file.");
                if (!isFirstFile) {
                    // For every file after the first, the VCF header must be skipped:
                    // decompress blocks until the first non-header byte is found, then
                    // re-compress the remainder of that block as a fresh gzip block.
                    final BlockCompressedInputStream blockIn = new BlockCompressedInputStream(in, false);
                    // Tracks whether the previous byte ended a line; a header line starts
                    // with '#', so a non-'#' byte at the start of a line ends the header.
                    boolean lastByteNewline = true;
                    while (in.available() > 0) {
                        // Read a block - blockIn.available() is guaranteed to return the bytes remaining in the block that has been
                        // read, and since we haven't consumed any yet, that is the block size.
                        final int blockLength = blockIn.available();
                        final byte[] blockContents = new byte[blockLength];
                        final int read = blockIn.read(blockContents);
                        if (blockLength == 0 || read != blockLength)
                            throw new IllegalStateException("Could not read available bytes from BlockCompressedInputStream.");
                        // Scan forward within the block to see if we can find the end of the header within this block
                        int firstNonHeaderByteIndex = -1;
                        for (int i = 0; i < read; ++i) {
                            final byte b = blockContents[i];
                            final boolean thisByteNewline = (b == '\n' || b == '\r');
                            if (lastByteNewline && !thisByteNewline && b != '#') {
                                // Aha! Found first byte of non-header data in file!
                                firstNonHeaderByteIndex = i;
                                break;
                            }
                            lastByteNewline = thisByteNewline;
                        }
                        // If we found the end of the header then write the remainder of this block out as a
                        // new gzip block and then break out of the while loop
                        if (firstNonHeaderByteIndex >= 0) {
                            final BlockCompressedOutputStream blockOut = new BlockCompressedOutputStream(out, null);
                            blockOut.write(blockContents, firstNonHeaderByteIndex, blockContents.length - firstNonHeaderByteIndex);
                            blockOut.flush();
                            // Don't close blockOut because closing underlying stream would break everything
                            break;
                        }
                    }
                }
                // Copy remainder of input stream into output stream
                // (blockIn has advanced the underlying FileInputStream past the header blocks,
                // so this copy starts at the first block after the one containing the header).
                final long currentPos = in.getChannel().position();
                final long length = f.length();
                final long skipLast = (term == BlockCompressedInputStream.FileTermination.HAS_TERMINATOR_BLOCK) ?
                        BlockCompressedStreamConstants.EMPTY_GZIP_BLOCK.length : 0;
                final long bytesToWrite = length - skipLast - currentPos;
                IOUtil.transferByStream(in, out, bytesToWrite);
                isFirstFile = false;
            }
        }
        // And lastly add the Terminator block and close up
        out.write(BlockCompressedStreamConstants.EMPTY_GZIP_BLOCK);
    }
    catch (final IOException ioe) {
        throw new RuntimeIOException(ioe);
    }
}
}
| |
/* CreateAssignExpression Copyright (C) 1998-2002 Jochen Hoenicke.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation; either version 2, or (at your option)
* any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; see the file COPYING.LESSER. If not, write to
* the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.
*
* $Id: CreateAssignExpression.java,v 1.20.2.2 2002/05/28 17:34:08 hoenicke Exp $
*/
package jode.flow;
import jode.expr.BinaryOperator;
import jode.expr.ConvertOperator;
import jode.expr.Expression;
import jode.expr.LocalLoadOperator;
import jode.expr.LocalStoreOperator;
import jode.expr.Operator;
import jode.expr.StoreInstruction;
import jode.expr.StringAddOperator;
import jode.type.Type;
/**
 * Flow transformation that recognizes the bytecode patterns produced for
 * compound assignments ({@code x op= y}) and for assignments used as
 * expressions ({@code a = (b = c)}), and rewrites the structured blocks
 * accordingly.
 */
public class CreateAssignExpression {
    /**
     * Entry point: attempts to turn a void StoreInstruction that terminates a
     * SequentialBlock into either an op-assign or a non-void assignment
     * expression.
     *
     * @param ic   the container holding the candidate store instruction
     * @param last the last structured block (must sit inside a SequentialBlock)
     * @return true if either transformation was applied
     */
    public static boolean transform(InstructionContainer ic,
            StructuredBlock last) {
        if (!(last.outer instanceof SequentialBlock)
                || !(ic.getInstruction() instanceof StoreInstruction)
                || !(ic.getInstruction().isVoid()))
            return false;
        return (createAssignOp(ic, last) || createAssignExpression(ic, last));
    }
    /**
     * Recognizes the pattern for a compound assignment and rewrites it.
     *
     * @return true if the blocks matched the pattern and were rewritten
     */
    public static boolean createAssignOp(InstructionContainer ic,
            StructuredBlock last) {
        /*
         * Situation:
         *
         *   (push loadstoreOps)      <- not checked
         *   sequBlock:
         *       dup                  (may be missing for static / local variables)
         *   opBlock:
         *       PUSH (optional narrow)((optional wide) load(stack) * RHS)
         *       (optional dup_x)
         *       store(POP)
         *
         * We transform it to:
         *
         *   (push loadstoreOps)
         *   rightHandSide
         *   (optional dup_x)
         *   store(stack) *= (stack)
         *
         * If the optional dup is present the store*= becomes non void.
         */
        SequentialBlock opBlock = (SequentialBlock) last.outer;
        StoreInstruction store = (StoreInstruction) ic.getInstruction();
        // The store must still be waiting for its operands and must not
        // already have been converted to an op-assign.
        if (!store.isFreeOperator() || store.isOpAssign())
            return false;
        Expression lvalue = store.getSubExpressions()[0];
        int lvalueCount = lvalue.getFreeOperandCount();
        boolean isAssignOp = false;
        if (opBlock.subBlocks[0] instanceof SpecialBlock) {
            // Optional dup_x before the store: its depth/count must match the
            // lvalue exactly, otherwise this is some other stack shuffle.
            SpecialBlock dup = (SpecialBlock) opBlock.subBlocks[0];
            if (dup.type != SpecialBlock.DUP || dup.depth != lvalueCount
                    || dup.count != lvalue.getType().stackSize()
                    || !(opBlock.outer instanceof SequentialBlock))
                return false;
            opBlock = (SequentialBlock) opBlock.outer;
            isAssignOp = true;
        }
        // The block before the (dup_x?, store) pair must push the combined
        // load-op-RHS expression.
        if (!(opBlock.subBlocks[0] instanceof InstructionBlock))
            return false;
        InstructionBlock ib = (InstructionBlock) opBlock.subBlocks[0];
        if (!(ib.getInstruction() instanceof Operator))
            return false;
        Operator expr = (Operator) ib.getInstruction();
        if (expr.getFreeOperandCount() != lvalueCount)
            return false;
        Type rvalueType = expr.getType();
        SpecialBlock dup = null;
        if (lvalueCount > 0) {
            // Non-local lvalue (array element / field): there must be a plain
            // dup of the loadstore operands in front of the op block.
            if (!(opBlock.outer instanceof SequentialBlock)
                    || !(opBlock.outer.getSubBlocks()[0] instanceof SpecialBlock))
                return false;
            SequentialBlock sequBlock = (SequentialBlock) opBlock.outer;
            dup = (SpecialBlock) sequBlock.subBlocks[0];
            if (dup.type != SpecialBlock.DUP || dup.depth != 0
                    || dup.count != lvalueCount)
                return false;
        }
        int opIndex;
        Expression rightHandSide;
        if (expr instanceof ConvertOperator
                && expr.getSubExpressions()[0] instanceof Operator
                && expr.getType().isOfType(lvalue.getType())) {
            /*
             * This gets tricky.  We need to allow something like
             *   s = (short) (int) ((double) s / 0.1);
             * so strip the leading chain of conversions as long as the outer
             * result type still matches the lvalue.
             */
            expr = (Operator) expr.getSubExpressions()[0];
            while (expr instanceof ConvertOperator
                    && expr.getSubExpressions()[0] instanceof Operator)
                expr = (Operator) expr.getSubExpressions()[0];
        }
        if (expr instanceof BinaryOperator) {
            // Numeric / bitwise case: the left operand (possibly behind more
            // conversions) must be a load of the same lvalue.
            opIndex = expr.getOperatorIndex();
            if (opIndex < expr.ADD_OP || opIndex >= expr.ASSIGN_OP)
                return false;
            if (!(expr.getSubExpressions()[0] instanceof Operator))
                return false;
            Operator loadExpr = (Operator) expr.getSubExpressions()[0];
            while (loadExpr instanceof ConvertOperator
                    && loadExpr.getSubExpressions()[0] instanceof Operator)
                loadExpr = (Operator) loadExpr.getSubExpressions()[0];
            if (!store.lvalueMatches((Operator) loadExpr)
                    || !(loadExpr.isFreeOperator(lvalueCount)))
                return false;
            // Local variable: merge the load's and store's local infos so they
            // get the same name later.
            if (lvalue instanceof LocalStoreOperator)
                ((LocalLoadOperator) loadExpr).getLocalInfo().combineWith(
                        ((LocalStoreOperator) lvalue).getLocalInfo());
            rightHandSide = expr.getSubExpressions()[1];
        } else {
            /*
             * For String += the situation is more complex.  What is marked as
             * "load(stack) * rightHandSide" above is really (after
             * simplification):
             *
             *   PUSH ((load(stack) + right) + Hand) + Side
             */
            Expression simple = expr.simplifyString();
            rightHandSide = simple;
            /* Now search for the leftmost operand ... */
            Operator lastExpr = null;
            Operator parent = null;
            while (simple instanceof StringAddOperator) {
                parent = lastExpr;
                lastExpr = (Operator) simple;
                simple = lastExpr.getSubExpressions()[0];
            }
            /* ... check it ... */
            if (lastExpr == null || !(simple instanceof Operator)
                    || !store.lvalueMatches((Operator) simple)
                    || !(((Operator) simple).isFreeOperator(lvalueCount)))
                return false;
            if (lvalue instanceof LocalStoreOperator)
                ((LocalLoadOperator) simple).getLocalInfo().combineWith(
                        ((LocalStoreOperator) lvalue).getLocalInfo());
            /* ... and remove it. */
            if (parent != null) {
                parent.setSubExpressions(0, lastExpr.getSubExpressions()[1]);
            } else {
                rightHandSide = lastExpr.getSubExpressions()[1];
            }
            opIndex = Operator.ADD_OP;
        }
        // Rewrite the blocks: drop the dup, make the instruction block push
        // only the right-hand side, and turn the store into "lvalue op= rhs".
        if (dup != null)
            dup.removeBlock();
        ib.setInstruction(rightHandSide);
        lvalue.setType(rvalueType);
        store.makeOpAssign(store.OPASSIGN_OP + opIndex);
        if (isAssignOp)
            store.makeNonVoid();
        last.replace(opBlock.subBlocks[1]);
        return true;
    }
    /**
     * Recognizes a dup followed by a store (an assignment whose value is
     * subsequently used) and makes the store non-void.
     *
     * @return true if the pattern matched and the dup was removed
     */
    public static boolean createAssignExpression(InstructionContainer ic,
            StructuredBlock last) {
        /*
         * Situation:
         *   sequBlock:
         *       dup_X(lvalue_count)
         *       store(POP) = POP
         */
        SequentialBlock sequBlock = (SequentialBlock) last.outer;
        StoreInstruction store = (StoreInstruction) ic.getInstruction();
        if (sequBlock.subBlocks[0] instanceof SpecialBlock
                && store.isFreeOperator()) {
            Expression lvalue = store.getSubExpressions()[0];
            SpecialBlock dup = (SpecialBlock) sequBlock.subBlocks[0];
            // The dup must duplicate exactly the stored value below the
            // lvalue's own operands.
            if (dup.type != SpecialBlock.DUP
                    || dup.depth != lvalue.getFreeOperandCount()
                    || dup.count != lvalue.getType().stackSize())
                return false;
            dup.removeBlock();
            store.makeNonVoid();
            return true;
        }
        return false;
    }
}
| |
/*
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.flex.forks.batik.dom.svg;
import java.awt.geom.AffineTransform;
import java.awt.geom.NoninvertibleTransformException;
import org.w3c.dom.DOMException;
import org.w3c.dom.svg.SVGException;
import org.w3c.dom.svg.SVGMatrix;
/**
* This class provides an abstract implementation of the {@link SVGMatrix}
* interface.
*
* @author <a href="mailto:stephane@hillion.org">Stephane Hillion</a>
* @version $Id: AbstractSVGMatrix.java 501495 2007-01-30 18:00:36Z dvholten $
*/
public abstract class AbstractSVGMatrix implements SVGMatrix {

    /**
     * Transform concatenated by {@link #flipX()}: negates the x axis.
     */
    protected static final AffineTransform FLIP_X_TRANSFORM =
        new AffineTransform(-1, 0, 0, 1, 0, 0);

    /**
     * Transform concatenated by {@link #flipY()}: negates the y axis.
     */
    protected static final AffineTransform FLIP_Y_TRANSFORM =
        new AffineTransform(1, 0, 0, -1, 0, 0);

    /**
     * Supplies the {@link AffineTransform} that backs this matrix. The six
     * SVG matrix entries a..f map onto the transform as
     * a=scaleX, b=shearY, c=shearX, d=scaleY, e=translateX, f=translateY.
     */
    protected abstract AffineTransform getAffineTransform();

    /**
     * Implements {@link SVGMatrix#getA()}.
     */
    public float getA() {
        return (float) getAffineTransform().getScaleX();
    }

    /**
     * Implements {@link SVGMatrix#setA(float)}.
     */
    public void setA(float a) throws DOMException {
        AffineTransform txf = getAffineTransform();
        // Replace only the 'a' entry, preserving the other five.
        txf.setTransform(a, txf.getShearY(), txf.getShearX(),
                         txf.getScaleY(), txf.getTranslateX(), txf.getTranslateY());
    }

    /**
     * Implements {@link SVGMatrix#getB()}.
     */
    public float getB() {
        return (float) getAffineTransform().getShearY();
    }

    /**
     * Implements {@link SVGMatrix#setB(float)}.
     */
    public void setB(float b) throws DOMException {
        AffineTransform txf = getAffineTransform();
        txf.setTransform(txf.getScaleX(), b, txf.getShearX(),
                         txf.getScaleY(), txf.getTranslateX(), txf.getTranslateY());
    }

    /**
     * Implements {@link SVGMatrix#getC()}.
     */
    public float getC() {
        return (float) getAffineTransform().getShearX();
    }

    /**
     * Implements {@link SVGMatrix#setC(float)}.
     */
    public void setC(float c) throws DOMException {
        AffineTransform txf = getAffineTransform();
        txf.setTransform(txf.getScaleX(), txf.getShearY(), c,
                         txf.getScaleY(), txf.getTranslateX(), txf.getTranslateY());
    }

    /**
     * Implements {@link SVGMatrix#getD()}.
     */
    public float getD() {
        return (float) getAffineTransform().getScaleY();
    }

    /**
     * Implements {@link SVGMatrix#setD(float)}.
     */
    public void setD(float d) throws DOMException {
        AffineTransform txf = getAffineTransform();
        txf.setTransform(txf.getScaleX(), txf.getShearY(), txf.getShearX(),
                         d, txf.getTranslateX(), txf.getTranslateY());
    }

    /**
     * Implements {@link SVGMatrix#getE()}.
     */
    public float getE() {
        return (float) getAffineTransform().getTranslateX();
    }

    /**
     * Implements {@link SVGMatrix#setE(float)}.
     */
    public void setE(float e) throws DOMException {
        AffineTransform txf = getAffineTransform();
        txf.setTransform(txf.getScaleX(), txf.getShearY(), txf.getShearX(),
                         txf.getScaleY(), e, txf.getTranslateY());
    }

    /**
     * Implements {@link SVGMatrix#getF()}.
     */
    public float getF() {
        return (float) getAffineTransform().getTranslateY();
    }

    /**
     * Implements {@link SVGMatrix#setF(float)}.
     */
    public void setF(float f) throws DOMException {
        AffineTransform txf = getAffineTransform();
        txf.setTransform(txf.getScaleX(), txf.getShearY(), txf.getShearX(),
                         txf.getScaleY(), txf.getTranslateX(), f);
    }

    /**
     * Implements {@link SVGMatrix#multiply(SVGMatrix)}: returns a new matrix
     * equal to {@code this * secondMatrix}; neither operand is modified.
     */
    public SVGMatrix multiply(SVGMatrix secondMatrix) {
        AffineTransform other = new AffineTransform(secondMatrix.getA(),
                                                    secondMatrix.getB(),
                                                    secondMatrix.getC(),
                                                    secondMatrix.getD(),
                                                    secondMatrix.getE(),
                                                    secondMatrix.getF());
        AffineTransform product = (AffineTransform) getAffineTransform().clone();
        product.concatenate(other);
        return new SVGOMMatrix(product);
    }

    /**
     * Implements {@link SVGMatrix#inverse()}.
     */
    public SVGMatrix inverse() throws SVGException {
        try {
            return new SVGOMMatrix(getAffineTransform().createInverse());
        } catch (NoninvertibleTransformException e) {
            throw new SVGOMException(SVGException.SVG_MATRIX_NOT_INVERTABLE,
                                     e.getMessage());
        }
    }

    /**
     * Implements {@link SVGMatrix#translate(float,float)}.
     */
    public SVGMatrix translate(float x, float y) {
        AffineTransform result = (AffineTransform) getAffineTransform().clone();
        result.translate(x, y);
        return new SVGOMMatrix(result);
    }

    /**
     * Implements {@link SVGMatrix#scale(float)}: uniform scale.
     */
    public SVGMatrix scale(float scaleFactor) {
        AffineTransform result = (AffineTransform) getAffineTransform().clone();
        result.scale(scaleFactor, scaleFactor);
        return new SVGOMMatrix(result);
    }

    /**
     * Implements {@link SVGMatrix#scaleNonUniform(float,float)}.
     */
    public SVGMatrix scaleNonUniform (float scaleFactorX, float scaleFactorY) {
        AffineTransform result = (AffineTransform) getAffineTransform().clone();
        result.scale(scaleFactorX, scaleFactorY);
        return new SVGOMMatrix(result);
    }

    /**
     * Implements {@link SVGMatrix#rotate(float)}: angle is in degrees.
     */
    public SVGMatrix rotate(float angle) {
        AffineTransform result = (AffineTransform) getAffineTransform().clone();
        result.rotate(Math.toRadians(angle));
        return new SVGOMMatrix(result);
    }

    /**
     * Implements {@link SVGMatrix#rotateFromVector(float,float)}.
     *
     * @throws SVGException with code SVG_INVALID_VALUE_ERR if x or y is zero
     */
    public SVGMatrix rotateFromVector(float x, float y) throws SVGException {
        if (x == 0 || y == 0) {
            throw new SVGOMException(SVGException.SVG_INVALID_VALUE_ERR, "");
        }
        AffineTransform result = (AffineTransform) getAffineTransform().clone();
        result.rotate(Math.atan2(y, x));
        return new SVGOMMatrix(result);
    }

    /**
     * Implements {@link SVGMatrix#flipX()}.
     */
    public SVGMatrix flipX() {
        AffineTransform result = (AffineTransform) getAffineTransform().clone();
        result.concatenate(FLIP_X_TRANSFORM);
        return new SVGOMMatrix(result);
    }

    /**
     * Implements {@link SVGMatrix#flipY()}.
     */
    public SVGMatrix flipY() {
        AffineTransform result = (AffineTransform) getAffineTransform().clone();
        result.concatenate(FLIP_Y_TRANSFORM);
        return new SVGOMMatrix(result);
    }

    /**
     * Implements {@link SVGMatrix#skewX(float)}: angle is in degrees.
     */
    public SVGMatrix skewX(float angleDeg) {
        AffineTransform result = (AffineTransform) getAffineTransform().clone();
        result.concatenate(
            AffineTransform.getShearInstance(Math.tan(Math.toRadians(angleDeg)), 0));
        return new SVGOMMatrix(result);
    }

    /**
     * Implements {@link SVGMatrix#skewY(float)}: angle is in degrees.
     */
    public SVGMatrix skewY(float angleDeg ) {
        AffineTransform result = (AffineTransform) getAffineTransform().clone();
        result.concatenate(
            AffineTransform.getShearInstance(0, Math.tan(Math.toRadians(angleDeg))));
        return new SVGOMMatrix(result);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.server.state;
import org.codehaus.jackson.annotate.JsonProperty;
import io.swagger.annotations.ApiModelProperty;
/**
 * Cluster Health Report (part of Clusters API response): host counts bucketed
 * by host state, host status, stale-config flag and maintenance mode.
 */
public class ClusterHealthReport {

    private static final String HOST_STALE_CONFIG = "Host/stale_config";
    private static final String HOST_MAINTENANCE_STATE = "Host/maintenance_state";
    private static final String HOST_HOST_STATE_HEALTHY = "Host/host_state/HEALTHY";
    private static final String HOST_HOST_STATE_UNHEALTHY = "Host/host_state/UNHEALTHY";
    private static final String HOST_HOST_STATE_INIT = "Host/host_state/INIT";
    private static final String HOST_HOST_STATUS_HEALTHY = "Host/host_status/HEALTHY";
    private static final String HOST_HOST_STATUS_UNHEALTHY = "Host/host_status/UNHEALTHY";
    private static final String HOST_HOST_STATUS_UNKNOWN = "Host/host_status/UNKNOWN";
    private static final String HOST_HOST_STATUS_ALERT = "Host/host_status/ALERT";
    private static final String HOST_HOST_STATE_HEARTBEAT_LOST = "Host/host_state/HEARTBEAT_LOST";

    private int staleConfigsHosts;
    private int maintenanceStateHosts;
    private int healthyStateHosts;
    private int unhealthyStateHosts;
    private int heartbeatLostStateHosts;
    private int initStateHosts;
    private int healthyStatusHosts;
    private int unhealthyStatusHosts;
    private int unknownStatusHosts;
    private int alertStatusHosts;

    public ClusterHealthReport() {
    }

    /**
     * @return number of hosts having stale_config set to true
     */
    @JsonProperty(HOST_STALE_CONFIG)
    @ApiModelProperty(name = HOST_STALE_CONFIG)
    public int getStaleConfigsHosts() {
        return staleConfigsHosts;
    }

    /**
     * @param staleConfigsHosts number of hosts having stale_config set to true
     */
    public void setStaleConfigsHosts(int staleConfigsHosts) {
        this.staleConfigsHosts = staleConfigsHosts;
    }

    /**
     * @return number of hosts having maintenance state on
     */
    @JsonProperty(HOST_MAINTENANCE_STATE)
    @ApiModelProperty(name = HOST_MAINTENANCE_STATE)
    public int getMaintenanceStateHosts() {
        return maintenanceStateHosts;
    }

    /**
     * @param maintenanceStateHosts number of hosts having maintenance state on
     */
    public void setMaintenanceStateHosts(int maintenanceStateHosts) {
        this.maintenanceStateHosts = maintenanceStateHosts;
    }

    /**
     * @return number of hosts having host state HEALTHY
     */
    @JsonProperty(HOST_HOST_STATE_HEALTHY)
    @ApiModelProperty(name = HOST_HOST_STATE_HEALTHY)
    public int getHealthyStateHosts() {
        return healthyStateHosts;
    }

    /**
     * @param healthyStateHosts number of hosts having host state HEALTHY
     */
    public void setHealthyStateHosts(int healthyStateHosts) {
        this.healthyStateHosts = healthyStateHosts;
    }

    /**
     * @return number of hosts having host state UNHEALTHY
     */
    @JsonProperty(HOST_HOST_STATE_UNHEALTHY)
    @ApiModelProperty(name = HOST_HOST_STATE_UNHEALTHY)
    public int getUnhealthyStateHosts() {
        return unhealthyStateHosts;
    }

    /**
     * @param unhealthyStateHosts number of hosts having host state UNHEALTHY
     */
    public void setUnhealthyStateHosts(int unhealthyStateHosts) {
        this.unhealthyStateHosts = unhealthyStateHosts;
    }

    /**
     * @return number of hosts having host state INIT
     */
    @JsonProperty(HOST_HOST_STATE_INIT)
    @ApiModelProperty(name = HOST_HOST_STATE_INIT)
    public int getInitStateHosts() {
        return initStateHosts;
    }

    /**
     * @param initStateHosts number of hosts having host state INIT
     */
    public void setInitStateHosts(int initStateHosts) {
        this.initStateHosts = initStateHosts;
    }

    /**
     * @return number of hosts having host status HEALTHY
     */
    @JsonProperty(HOST_HOST_STATUS_HEALTHY)
    @ApiModelProperty(name = HOST_HOST_STATUS_HEALTHY)
    public int getHealthyStatusHosts() {
        return healthyStatusHosts;
    }

    /**
     * @param healthyStatusHosts number of hosts having host status HEALTHY
     */
    public void setHealthyStatusHosts(int healthyStatusHosts) {
        this.healthyStatusHosts = healthyStatusHosts;
    }

    /**
     * @return number of hosts having host status UNHEALTHY
     */
    @JsonProperty(HOST_HOST_STATUS_UNHEALTHY)
    @ApiModelProperty(name = HOST_HOST_STATUS_UNHEALTHY)
    public int getUnhealthyStatusHosts() {
        return unhealthyStatusHosts;
    }

    /**
     * @param unhealthyStatusHosts number of hosts having host status UNHEALTHY
     */
    public void setUnhealthyStatusHosts(int unhealthyStatusHosts) {
        this.unhealthyStatusHosts = unhealthyStatusHosts;
    }

    /**
     * @return number of hosts having host status UNKNOWN
     */
    @JsonProperty(HOST_HOST_STATUS_UNKNOWN)
    @ApiModelProperty(name = HOST_HOST_STATUS_UNKNOWN)
    public int getUnknownStatusHosts() {
        return unknownStatusHosts;
    }

    /**
     * @param unknownStatusHosts number of hosts having host status UNKNOWN
     */
    public void setUnknownStatusHosts(int unknownStatusHosts) {
        this.unknownStatusHosts = unknownStatusHosts;
    }

    /**
     * @return number of hosts having host status ALERT
     */
    @JsonProperty(HOST_HOST_STATUS_ALERT)
    @ApiModelProperty(name = HOST_HOST_STATUS_ALERT)
    public int getAlertStatusHosts() {
        return alertStatusHosts;
    }

    /**
     * @param alertStatusHosts number of hosts having host status ALERT
     */
    public void setAlertStatusHosts(int alertStatusHosts) {
        this.alertStatusHosts = alertStatusHosts;
    }

    /**
     * @return number of hosts having host state HEARTBEAT_LOST
     */
    @JsonProperty(HOST_HOST_STATE_HEARTBEAT_LOST)
    @ApiModelProperty(name = HOST_HOST_STATE_HEARTBEAT_LOST)
    public int getHeartbeatLostStateHosts() {
        return heartbeatLostStateHosts;
    }

    /**
     * @param heartbeatLostStateHosts number of hosts having host state HEARTBEAT_LOST
     */
    public void setHeartbeatLostStateHosts(int heartbeatLostStateHosts) {
        this.heartbeatLostStateHosts = heartbeatLostStateHosts;
    }

    @Override
    public String toString() {
        // Field order here must stay stable; callers may log/compare this string.
        StringBuilder sb = new StringBuilder("ClusterHealthReport{");
        sb.append("staleConfigsHosts=").append(staleConfigsHosts)
          .append(", maintenanceStateHosts=").append(maintenanceStateHosts)
          .append(", healthyStateHosts=").append(healthyStateHosts)
          .append(", unhealthyStateHosts=").append(unhealthyStateHosts)
          .append(", heartbeatLostStateHosts=").append(heartbeatLostStateHosts)
          .append(", initStateHosts=").append(initStateHosts)
          .append(", healthyStatusHosts=").append(healthyStatusHosts)
          .append(", unhealthyStatusHosts=").append(unhealthyStatusHosts)
          .append(", unknownStatusHosts=").append(unknownStatusHosts)
          .append(", alertStatusHosts=").append(alertStatusHosts)
          .append('}');
        return sb.toString();
    }
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.mariadb.fluent.models;
import com.azure.core.annotation.Fluent;
import com.azure.core.annotation.JsonFlatten;
import com.azure.core.management.ProxyResource;
import com.azure.core.util.logging.ClientLogger;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.time.OffsetDateTime;
import java.util.List;
/** Represents a Query Statistic. */
@JsonFlatten
@Fluent
public class QueryStatisticInner extends ProxyResource {
    @JsonIgnore private final ClientLogger logger = new ClientLogger(QueryStatisticInner.class);

    /*
     * Database query identifier.
     */
    @JsonProperty(value = "properties.queryId")
    private String queryId;

    /*
     * Observation start time.
     */
    @JsonProperty(value = "properties.startTime")
    private OffsetDateTime startTime;

    /*
     * Observation end time.
     */
    @JsonProperty(value = "properties.endTime")
    private OffsetDateTime endTime;

    /*
     * Aggregation function name.
     */
    @JsonProperty(value = "properties.aggregationFunction")
    private String aggregationFunction;

    /*
     * The list of database names.
     */
    @JsonProperty(value = "properties.databaseNames")
    private List<String> databaseNames;

    /*
     * Number of query executions in this time interval.
     */
    @JsonProperty(value = "properties.queryExecutionCount")
    private Long queryExecutionCount;

    /*
     * Metric name.
     */
    @JsonProperty(value = "properties.metricName")
    private String metricName;

    /*
     * Metric display name.
     */
    @JsonProperty(value = "properties.metricDisplayName")
    private String metricDisplayName;

    /*
     * Metric value.
     */
    @JsonProperty(value = "properties.metricValue")
    private Double metricValue;

    /*
     * Metric value unit.
     */
    @JsonProperty(value = "properties.metricValueUnit")
    private String metricValueUnit;

    /** @return the database query identifier. */
    public String queryId() {
        return queryId;
    }

    /**
     * Sets the database query identifier.
     *
     * @param queryId the queryId value to set.
     * @return this {@code QueryStatisticInner} for chaining.
     */
    public QueryStatisticInner withQueryId(String queryId) {
        this.queryId = queryId;
        return this;
    }

    /** @return the observation start time. */
    public OffsetDateTime startTime() {
        return startTime;
    }

    /**
     * Sets the observation start time.
     *
     * @param startTime the startTime value to set.
     * @return this {@code QueryStatisticInner} for chaining.
     */
    public QueryStatisticInner withStartTime(OffsetDateTime startTime) {
        this.startTime = startTime;
        return this;
    }

    /** @return the observation end time. */
    public OffsetDateTime endTime() {
        return endTime;
    }

    /**
     * Sets the observation end time.
     *
     * @param endTime the endTime value to set.
     * @return this {@code QueryStatisticInner} for chaining.
     */
    public QueryStatisticInner withEndTime(OffsetDateTime endTime) {
        this.endTime = endTime;
        return this;
    }

    /** @return the aggregation function name. */
    public String aggregationFunction() {
        return aggregationFunction;
    }

    /**
     * Sets the aggregation function name.
     *
     * @param aggregationFunction the aggregationFunction value to set.
     * @return this {@code QueryStatisticInner} for chaining.
     */
    public QueryStatisticInner withAggregationFunction(String aggregationFunction) {
        this.aggregationFunction = aggregationFunction;
        return this;
    }

    /** @return the list of database names. */
    public List<String> databaseNames() {
        return databaseNames;
    }

    /**
     * Sets the list of database names.
     *
     * @param databaseNames the databaseNames value to set.
     * @return this {@code QueryStatisticInner} for chaining.
     */
    public QueryStatisticInner withDatabaseNames(List<String> databaseNames) {
        this.databaseNames = databaseNames;
        return this;
    }

    /** @return the number of query executions in this time interval. */
    public Long queryExecutionCount() {
        return queryExecutionCount;
    }

    /**
     * Sets the number of query executions in this time interval.
     *
     * @param queryExecutionCount the queryExecutionCount value to set.
     * @return this {@code QueryStatisticInner} for chaining.
     */
    public QueryStatisticInner withQueryExecutionCount(Long queryExecutionCount) {
        this.queryExecutionCount = queryExecutionCount;
        return this;
    }

    /** @return the metric name. */
    public String metricName() {
        return metricName;
    }

    /**
     * Sets the metric name.
     *
     * @param metricName the metricName value to set.
     * @return this {@code QueryStatisticInner} for chaining.
     */
    public QueryStatisticInner withMetricName(String metricName) {
        this.metricName = metricName;
        return this;
    }

    /** @return the metric display name. */
    public String metricDisplayName() {
        return metricDisplayName;
    }

    /**
     * Sets the metric display name.
     *
     * @param metricDisplayName the metricDisplayName value to set.
     * @return this {@code QueryStatisticInner} for chaining.
     */
    public QueryStatisticInner withMetricDisplayName(String metricDisplayName) {
        this.metricDisplayName = metricDisplayName;
        return this;
    }

    /** @return the metric value. */
    public Double metricValue() {
        return metricValue;
    }

    /**
     * Sets the metric value.
     *
     * @param metricValue the metricValue value to set.
     * @return this {@code QueryStatisticInner} for chaining.
     */
    public QueryStatisticInner withMetricValue(Double metricValue) {
        this.metricValue = metricValue;
        return this;
    }

    /** @return the metric value unit. */
    public String metricValueUnit() {
        return metricValueUnit;
    }

    /**
     * Sets the metric value unit.
     *
     * @param metricValueUnit the metricValueUnit value to set.
     * @return this {@code QueryStatisticInner} for chaining.
     */
    public QueryStatisticInner withMetricValueUnit(String metricValueUnit) {
        this.metricValueUnit = metricValueUnit;
        return this;
    }

    /**
     * Validates the instance.
     *
     * @throws IllegalArgumentException thrown if the instance is not valid.
     */
    public void validate() {
        // No required properties on this model; nothing to check.
    }
}
| |
package org.apache.solr.cloud;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.File;
import java.io.FilenameFilter;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import org.apache.commons.cli.CommandLine;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.request.ContentStreamUpdateRequest;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.util.ExternalPaths;
import org.apache.solr.util.SolrCLI;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Emulates bin/solr -e cloud -noprompt; bin/post -c gettingstarted example/exampledocs/*.xml;
 * this test is useful for catching regressions in indexing the example docs in collections that
 * use data-driven schema and managed schema features provided by configsets/data_driven_schema_configs.
 */
public class SolrCloudExampleTest extends AbstractFullDistribZkTestBase {

  protected static final transient Logger log = LoggerFactory.getLogger(SolrCloudExampleTest.class);

  public SolrCloudExampleTest() {
    super();
    sliceCount = 2; // two shards, matching bin/solr's cloud example defaults
  }

  @Override
  public void distribSetUp() throws Exception {
    super.distribSetUp();
    // the collection-creation path reads numShards from a system property
    System.setProperty("numShards", Integer.toString(sliceCount));
  }

  /**
   * Creates the "gettingstarted" collection via SolrCLI, posts the example XML docs,
   * then exercises the config, healthcheck and delete CLI actions against it.
   */
  @Test
  public void testLoadDocsIntoGettingStartedCollection() throws Exception {
    waitForThingsToLevelOut(30000);

    log.info("testLoadDocsIntoGettingStartedCollection initialized OK ... running test logic");

    String testCollectionName = "gettingstarted";
    File data_driven_schema_configs = new File(ExternalPaths.SCHEMALESS_CONFIGSET);
    assertTrue(data_driven_schema_configs.getAbsolutePath() + " not found!", data_driven_schema_configs.isDirectory());

    Set<String> liveNodes = cloudClient.getZkStateReader().getClusterState().getLiveNodes();
    if (liveNodes.isEmpty())
      fail("No live nodes found! Cannot create a collection until there is at least 1 live node in the cluster.");
    String firstLiveNode = liveNodes.iterator().next();
    String solrUrl = cloudClient.getZkStateReader().getBaseUrlForNodeName(firstLiveNode);

    // create the gettingstarted collection just like the bin/solr script would do
    String[] args = new String[] {
        "create_collection",
        "-name", testCollectionName,
        "-shards", "2",
        "-replicationFactor", "2",
        "-confname", testCollectionName,
        "-confdir", "data_driven_schema_configs",
        "-configsetsDir", data_driven_schema_configs.getParentFile().getParentFile().getAbsolutePath(),
        "-solrUrl", solrUrl
    };

    SolrCLI.CreateCollectionTool tool = new SolrCLI.CreateCollectionTool();
    CommandLine cli = SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(tool.getOptions()), args);
    log.info("Creating the '{}' collection using SolrCLI with: {}", testCollectionName, solrUrl);
    tool.runTool(cli);
    assertTrue("Collection '" + testCollectionName + "' doesn't exist after trying to create it!",
        cloudClient.getZkStateReader().getClusterState().hasCollection(testCollectionName));

    // verify the collection is usable ...
    ensureAllReplicasAreActive(testCollectionName, "shard1", 2, 2, 20);
    ensureAllReplicasAreActive(testCollectionName, "shard2", 2, 2, 10);
    cloudClient.setDefaultCollection(testCollectionName);

    // now index docs like bin/post would do but we can't use SimplePostTool because it uses
    // System.exit when it encounters an error, which JUnit doesn't like ...
    log.info("Created collection, now posting example docs!");
    File exampleDocsDir = new File(ExternalPaths.SOURCE_HOME, "example/exampledocs");
    assertTrue(exampleDocsDir.getAbsolutePath() + " not found!", exampleDocsDir.isDirectory());
    List<File> xmlFiles = Arrays.asList(exampleDocsDir.listFiles(new FilenameFilter() {
      @Override
      public boolean accept(File dir, String name) {
        return name.endsWith(".xml");
      }
    }));

    // force a deterministic random ordering of the files so seeds reproduce regardless of
    // platform/filesystem
    Collections.sort(xmlFiles, new Comparator<File>() {
      @Override
      public int compare(File o1, File o2) {
        // don't rely on File.compareTo, it's behavior varies by OS
        return o1.getName().compareTo(o2.getName());
      }
    });
    Collections.shuffle(xmlFiles, new Random(random().nextLong()));

    // if you add/remove example XML docs, you'll have to fix these expected values
    int expectedXmlFileCount = 14;
    int expectedXmlDocCount = 32;
    assertEquals("Unexpected # of example XML files in " + exampleDocsDir.getAbsolutePath(),
        expectedXmlFileCount, xmlFiles.size());

    for (File xml : xmlFiles) {
      ContentStreamUpdateRequest req = new ContentStreamUpdateRequest("/update");
      req.addFile(xml, "application/xml");
      log.info("POSTing {}", xml.getAbsolutePath());
      cloudClient.request(req);
    }
    cloudClient.commit();

    // give the cluster a moment to make the committed docs visible to searchers
    Thread.sleep(1000);

    QueryResponse qr = cloudClient.query(new SolrQuery("*:*"));
    int numFound = (int) qr.getResults().getNumFound();
    assertEquals("*:* found unexpected number of documents", expectedXmlDocCount, numFound);

    log.info("Updating Config for {}", testCollectionName);
    doTestConfigUpdate(testCollectionName, solrUrl);

    log.info("Running healthcheck for {}", testCollectionName);
    doTestHealthcheck(testCollectionName, cloudClient.getZkHost());

    // verify the delete action works too
    log.info("Running delete for {}", testCollectionName);
    doTestDeleteAction(testCollectionName, solrUrl);

    log.info("testLoadDocsIntoGettingStartedCollection succeeded ... shutting down now!");
  }

  /** Runs the SolrCLI healthcheck action against the collection and asserts it succeeds. */
  protected void doTestHealthcheck(String testCollectionName, String zkHost) throws Exception {
    String[] args = new String[] {
        "healthcheck",
        "-collection", testCollectionName,
        "-zkHost", zkHost
    };
    SolrCLI.HealthcheckTool tool = new SolrCLI.HealthcheckTool();
    CommandLine cli =
        SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(tool.getOptions()), args);
    assertEquals("Healthcheck action failed!", 0, tool.runTool(cli));
  }

  /** Deletes the collection via SolrCLI and verifies it no longer exists. */
  protected void doTestDeleteAction(String testCollectionName, String solrUrl) throws Exception {
    String[] args = new String[] {
        "delete",
        "-name", testCollectionName,
        "-solrUrl", solrUrl
    };
    SolrCLI.DeleteTool tool = new SolrCLI.DeleteTool();
    CommandLine cli =
        SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(tool.getOptions()), args);
    assertEquals("Delete action failed!", 0, tool.runTool(cli));
    assertFalse(SolrCLI.safeCheckCollectionExists(solrUrl, testCollectionName)); // it should not exist anymore
  }

  /**
   * Uses the SolrCLI config action to activate soft auto-commits for the getting started collection.
   */
  protected void doTestConfigUpdate(String testCollectionName, String solrUrl) throws Exception {
    if (!solrUrl.endsWith("/"))
      solrUrl += "/";

    String configUrl = solrUrl + testCollectionName + "/config";

    Map<String, Object> configJson = SolrCLI.getJson(configUrl);
    Object maxTimeFromConfig = SolrCLI.atPath("/config/updateHandler/autoSoftCommit/maxTime", configJson);
    assertNotNull(maxTimeFromConfig);
    // soft auto-commit is disabled (-1) by default in the data-driven configset
    assertEquals(Long.valueOf(-1L), maxTimeFromConfig);

    String prop = "updateHandler.autoSoftCommit.maxTime";
    Long maxTime = Long.valueOf(3000L);
    String[] args = new String[] {
        "config",
        "-collection", testCollectionName,
        "-property", prop,
        "-value", maxTime.toString(),
        "-solrUrl", solrUrl
    };

    SolrCLI.ConfigTool tool = new SolrCLI.ConfigTool();
    CommandLine cli = SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(tool.getOptions()), args);
    log.info("Sending set-property '{}'={} to SolrCLI.ConfigTool.", prop, maxTime);
    assertEquals("Set config property failed!", 0, tool.runTool(cli));

    // verify the change took effect by re-reading the config API
    configJson = SolrCLI.getJson(configUrl);
    maxTimeFromConfig = SolrCLI.atPath("/config/updateHandler/autoSoftCommit/maxTime", configJson);
    assertNotNull(maxTimeFromConfig);
    assertEquals(maxTime, maxTimeFromConfig);
  }
}
| |
/**
*
* Copyright 2017 Florian Erhard
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package gedi.util.datastructure.collections;
import gedi.util.ArrayUtils;
import gedi.util.functions.EI;
import gedi.util.functions.ExtendedIterator;
import gedi.util.io.randomaccess.PageFile;
import gedi.util.io.randomaccess.PageFileWriter;
import gedi.util.mutable.MutableInteger;
import gedi.util.orm.ClassTree;
import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
/**
 * A sorting collection with bounded memory usage: up to {@code memoryCapacity} elements
 * are buffered in memory; when the buffer fills, it is sorted and spilled to a temporary
 * page file as one run. {@link #iterator()} merge-sorts all spilled runs (or just the
 * in-memory buffer if nothing was spilled). Not thread-safe!
 *
 * @author erhard
 *
 * @param <T>
 */
public class FastSortingCollection<T> implements Collection<T>, Closeable {

  /** Max number of elements held in memory before spilling a sorted run to disk. */
  private int memoryCapacity;
  /** In-memory buffer of not-yet-spilled elements. */
  private ArrayList<T> mem;
  /** Temp-file writer; null until the first spill. */
  private PageFileWriter buf = null;
  /** End offset (exclusive) of each sorted run written to the temp file. */
  private ArrayList<Long> offsets = new ArrayList<Long>();
  private Comparator<? super T> comp;
  /** Total number of elements added (in memory plus on disk). */
  private int size;
  /** (De)serializer for elements, built from the prototype object. */
  private ClassTree<T> tree;
  /** Whether spilled entries are compressed on disk. */
  private boolean compress = true;

  public FastSortingCollection(T proto, Comparator<? super T> comp, int memoryCapacity) {
    tree = new ClassTree<T>(proto);
    this.comp = comp;
    // Bug fix: this was hard-coded to 128 (leftover debugging code), silently ignoring
    // the caller-supplied capacity.
    this.memoryCapacity = memoryCapacity;
    mem = new ArrayList<T>(memoryCapacity);
  }

  public void setCompress(boolean compress) {
    this.compress = compress;
  }

  /**
   * Adds an element, spilling the in-memory buffer to disk first if it is full.
   * Always returns true (this collection accepts every element).
   */
  public boolean add(T e) {
    size++;
    if (mem.size() >= memoryCapacity)
      spillToDisk();
    mem.add(e);
    return true;
  }

  /**
   * Returns a sorted iterator over all elements. If nothing has been spilled, sorts and
   * iterates the in-memory buffer; otherwise flushes the buffer as a final run and
   * merge-sorts all runs from the temp file.
   */
  @SuppressWarnings("unchecked")
  public ExtendedIterator<T> iterator() {
    if (buf == null) {
      Collections.sort(mem, comp);
      return EI.wrap(mem);
    }
    spillToDisk();
    try {
      PageFile f = buf.read(false);
      f.setUnmap(false);
      ExtendedIterator<T>[] iter = new ExtendedIterator[offsets.size()];
      for (int i = 0; i < iter.length; i++) {
        iter[i] = (ExtendedIterator<T>) f.view(i == 0 ? 0 : offsets.get(i - 1), offsets.get(i)).iterator(p -> {
          try {
            int size = p.getCInt();
            int rsize = size;
            if (compress) rsize = p.getCInt();
            p.get(buffer, 0, rsize); // must always be large enough (has been used to write)
            byte[] in = buffer;
            if (compress) {
              ArrayUtils.decompress(buffer, 0, cbuffer, 0, size);
              in = cbuffer;
            }
            T re = (T) tree.fromBuffer(in);
            return re;
          } catch (Exception e) {
            throw new RuntimeException("Could not deserialize object!", e);
          }
        });
      }
      // merge the sorted runs; close the backing file once iteration finishes
      return EI.merge(comp, iter).endAction(() -> {
        f.close();
      });
    } catch (IOException e) {
      throw new RuntimeException("Cannot iterate temp file!", e);
    }
  }

  /** Releases the temp file (if any) and discards all buffered elements. */
  @Override
  public void close() throws IOException {
    if (buf != null) {
      buf.close();
      new File(buf.getPath()).delete();
      buf = null;
    }
    mem.clear();
    // Bug fix: also reset the bookkeeping; previously size() and the run offsets kept
    // stale values after close()/clear(), so a cleared collection still reported elements.
    offsets.clear();
    size = 0;
  }

  private MutableInteger mi = new MutableInteger();
  /** Reusable serialization buffer (may be re-allocated/grown by tree.toBuffer). */
  private byte[] buffer = new byte[16 * 1024];
  /** Reusable compression buffer, grown on demand. */
  private byte[] cbuffer = new byte[16 * 1024];

  /** Sorts the in-memory buffer and appends it to the temp file as one run. */
  private void spillToDisk() {
    if (mem.size() == 0) return;
    Collections.sort(mem, comp);
    if (buf == null)
      try {
        buf = new PageFileWriter(Files.createTempFile("fastsortingcollection", ".tmp").toString());
        new File(buf.getPath()).deleteOnExit();
      } catch (IOException e) {
        throw new RuntimeException("Cannot create temp file!", e);
      }
    for (T e : mem)
      try {
        buffer = tree.toBuffer(e, buffer, mi);
        byte[] out = buffer;
        buf.putCInt(mi.N);
        if (compress) {
          // grow the compression buffer until it fits the worst-case compressed size
          while (cbuffer.length < ArrayUtils.getSaveCompressedSize(mi.N)) cbuffer = new byte[cbuffer.length * 2];
          mi.N = ArrayUtils.compress(buffer.clone(), 0, mi.N, cbuffer, 0);
          buf.putCInt(mi.N);
          out = cbuffer;
        }
        buf.put(out, 0, mi.N);
      } catch (IOException e1) {
        throw new RuntimeException("Cannot write entry !", e1);
      }
    offsets.add(buf.position());
    mem.clear();
  }

  @Override
  public int size() {
    return size;
  }

  @Override
  public boolean isEmpty() {
    return size == 0;
  }

  @Override
  public boolean contains(Object o) {
    throw new UnsupportedOperationException();
  }

  @Override
  public Object[] toArray() {
    return iterator().toArray(tree.getType());
  }

  @Override
  @SuppressWarnings("unchecked")
  public <E> E[] toArray(E[] a) {
    return (E[]) iterator().toArray((T[]) a);
  }

  @Override
  public boolean remove(Object o) {
    throw new UnsupportedOperationException();
  }

  @Override
  public boolean containsAll(Collection<?> c) {
    throw new UnsupportedOperationException();
  }

  @Override
  public boolean addAll(Collection<? extends T> c) {
    for (T t : c) add(t);
    // per the Collection contract, report whether this collection actually changed
    return !c.isEmpty();
  }

  @Override
  public boolean removeAll(Collection<?> c) {
    throw new UnsupportedOperationException();
  }

  @Override
  public boolean retainAll(Collection<?> c) {
    throw new UnsupportedOperationException();
  }

  /** Equivalent to {@link #close()}, wrapping the IOException as unchecked. */
  @Override
  public void clear() {
    try {
      close();
    } catch (IOException e) {
      throw new RuntimeException("Could not clear!", e);
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.accumulo.core.file;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;
import org.apache.accumulo.core.conf.AccumuloConfiguration;
import org.apache.accumulo.core.conf.Property;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.KeyExtent;
import org.apache.accumulo.core.data.PartialKey;
import org.apache.accumulo.core.data.Range;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.file.rfile.RFile;
import org.apache.accumulo.core.file.rfile.RFileOperations;
import org.apache.accumulo.core.iterators.SortedKeyValueIterator;
import org.apache.accumulo.core.iterators.system.MultiIterator;
import org.apache.accumulo.core.util.CachedConfiguration;
import org.apache.accumulo.core.util.LocalityGroupUtil;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;
import org.apache.log4j.Logger;
public class FileUtil {
/** Holds the first and last key of a map file, exposing the corresponding rows. */
public static class FileInfo {
  // Bug fix (minor): the fields were initialized with "new Key()" even though the only
  // constructor always overwrites them — dead allocations removed.
  Key firstKey;
  Key lastKey;

  public FileInfo(Key firstKey, Key lastKey) {
    this.firstKey = firstKey;
    this.lastKey = lastKey;
  }

  /** Row of the first key in the file. */
  public Text getFirstRow() {
    return firstKey.getRow();
  }

  /** Row of the last key in the file. */
  public Text getLastRow() {
    return lastKey.getRow();
  }
}
// Class-wide log4j logger for all file utility operations.
private static final Logger log = Logger.getLogger(FileUtil.class);
/**
 * Picks an unused directory name under the instance's /tmp area, creates it, and claims
 * it exclusively by creating a "__reserve" marker file inside. Retries with a new random
 * name until a directory is successfully claimed.
 */
private static String createTmpDir(AccumuloConfiguration acuConf, FileSystem fs) throws IOException {
  String baseDir = acuConf.get(Property.INSTANCE_DFS_DIR);
  while (true) {
    String candidate = baseDir + "/tmp/idxReduce_" + String.format("%09d", (int) (Math.random() * Integer.MAX_VALUE));
    boolean taken = true;
    try {
      fs.getFileStatus(new Path(candidate));
    } catch (FileNotFoundException fne) {
      // name is free
      taken = false;
    }
    if (taken)
      continue;
    fs.mkdirs(new Path(candidate));
    // claim the directory; if another process grabbed it first, loop and try a new name
    if (fs.createNewFile(new Path(candidate + "/__reserve")))
      return candidate;
  }
}
/**
 * Recursively merges the index entries of {@code mapFiles} into at most {@code maxFiles}
 * intermediate RFiles under {@code tmpDir}, so later operations never need more than
 * {@code maxFiles} open readers at once. Only index keys in (prevEndRow, endRow] are
 * retained; values are written empty.
 *
 * @return the input unchanged if it is already small enough, otherwise the reduced set
 *         of intermediate files produced by the final recursion pass
 */
public static Collection<String> reduceFiles(AccumuloConfiguration acuConf, Configuration conf, FileSystem fs, Text prevEndRow, Text endRow,
    Collection<String> mapFiles, int maxFiles, String tmpDir, int pass) throws IOException {
  ArrayList<String> paths = new ArrayList<String>(mapFiles);
  // already few enough files; nothing to do
  if (paths.size() <= maxFiles)
    return paths;
  String newDir = String.format("%s/pass_%04d", tmpDir, pass);
  int start = 0;
  ArrayList<String> outFiles = new ArrayList<String>();
  int count = 0;
  while (start < paths.size()) {
    // merge the next batch of up to maxFiles inputs into a single output file
    int end = Math.min(maxFiles + start, paths.size());
    List<String> inFiles = paths.subList(start, end);
    start = end;
    String newMapFile = String.format("%s/%04d." + RFile.EXTENSION, newDir, count++);
    outFiles.add(newMapFile);
    FileSKVWriter writer = new RFileOperations().openWriter(newMapFile, fs, conf, acuConf);
    writer.startDefaultLocalityGroup();
    List<SortedKeyValueIterator<Key,Value>> iters = new ArrayList<SortedKeyValueIterator<Key,Value>>(inFiles.size());
    FileSKVIterator reader = null;
    try {
      for (String s : inFiles) {
        reader = FileOperations.getInstance().openIndex(s, fs, conf, acuConf);
        iters.add(reader);
      }
      // merge-sort the index keys from all inputs in this batch
      MultiIterator mmfi = new MultiIterator(iters, true);
      while (mmfi.hasTop()) {
        Key key = mmfi.getTopKey();
        boolean gtPrevEndRow = prevEndRow == null || key.compareRow(prevEndRow) > 0;
        boolean lteEndRow = endRow == null || key.compareRow(endRow) <= 0;
        if (gtPrevEndRow && lteEndRow)
          writer.append(key, new Value(new byte[0]));
        if (!lteEndRow)
          // keys arrive sorted, so nothing later can be in range either
          break;
        mmfi.next();
      }
    } finally {
      try {
        if (reader != null)
          reader.close();
      } catch (IOException e) {
        log.error(e, e);
      }
      // close every opened index reader, not just the last one
      for (SortedKeyValueIterator<Key,Value> r : iters)
        try {
          if (r != null)
            ((FileSKVIterator) r).close();
        } catch (IOException e) {
          // continue closing
          log.error(e, e);
        }
      try {
        if (writer != null)
          writer.close();
      } catch (IOException e) {
        log.error(e, e);
        throw e;
      }
    }
  }
  // recurse until the file count fits within maxFiles
  return reduceFiles(acuConf, conf, fs, prevEndRow, endRow, outFiles, maxFiles, tmpDir, pass + 1);
}
/**
 * Convenience overload of
 * {@link #findMidPoint(FileSystem, AccumuloConfiguration, Text, Text, Collection, double, boolean)}
 * that always consults the index files.
 */
public static SortedMap<Double,Key> findMidPoint(FileSystem fs, AccumuloConfiguration acuConf, Text prevEndRow, Text endRow, Collection<String> mapFiles,
    double minSplit) throws IOException {
  final boolean useIndex = true;
  return findMidPoint(fs, acuConf, prevEndRow, endRow, mapFiles, minSplit, useIndex);
}
/**
 * Estimates the fraction of index entries in (prevEndRow, endRow] that are less than or
 * equal to {@code splitRow}, using only the files' index entries. Returns 0.5 when the
 * indexes contain no entries for the range. The result is smoothed (+1/+2) so it is
 * never exactly 0% or 100%.
 */
public static double estimatePercentageLTE(FileSystem fs, AccumuloConfiguration acuconf, Text prevEndRow, Text endRow, Collection<String> mapFiles,
    Text splitRow) throws IOException {
  Configuration conf = CachedConfiguration.getInstance();
  String tmpDir = null;
  int maxToOpen = acuconf.getCount(Property.TSERV_TABLET_SPLIT_FINDMIDPOINT_MAXOPEN);
  ArrayList<FileSKVIterator> readers = new ArrayList<FileSKVIterator>(mapFiles.size());
  try {
    // too many files to open at once: merge their indexes down to maxToOpen files first
    if (mapFiles.size() > maxToOpen) {
      tmpDir = createTmpDir(acuconf, fs);
      log.debug("Too many indexes (" + mapFiles.size() + ") to open at once for " + endRow + " " + prevEndRow + ", reducing in tmpDir = " + tmpDir);
      long t1 = System.currentTimeMillis();
      mapFiles = reduceFiles(acuconf, conf, fs, prevEndRow, endRow, mapFiles, maxToOpen, tmpDir, 0);
      long t2 = System.currentTimeMillis();
      log.debug("Finished reducing indexes for " + endRow + " " + prevEndRow + " in " + String.format("%6.2f secs", (t2 - t1) / 1000.0));
    }
    if (prevEndRow == null)
      prevEndRow = new Text();
    long numKeys = 0;
    // counts entries in range and fills 'readers' with freshly opened index readers
    numKeys = countIndexEntries(acuconf, prevEndRow, endRow, mapFiles, true, conf, fs, readers);
    if (numKeys == 0) {
      // not enough info in the index to answer the question, so instead of going to
      // the data just punt and return .5
      return .5;
    }
    List<SortedKeyValueIterator<Key,Value>> iters = new ArrayList<SortedKeyValueIterator<Key,Value>>(readers);
    MultiIterator mmfi = new MultiIterator(iters, true);
    // skip the prevendrow
    while (mmfi.hasTop() && mmfi.getTopKey().compareRow(prevEndRow) <= 0) {
      mmfi.next();
    }
    int numLte = 0;
    while (mmfi.hasTop() && mmfi.getTopKey().compareRow(splitRow) <= 0) {
      numLte++;
      mmfi.next();
    }
    if (numLte > numKeys) {
      // something went wrong
      throw new RuntimeException("numLte > numKeys " + numLte + " " + numKeys + " " + prevEndRow + " " + endRow + " " + splitRow + " " + mapFiles);
    }
    // do not want to return 0% or 100%, so add 1 and 2 below
    return (numLte + 1) / (double) (numKeys + 2);
  } finally {
    cleanupIndexOp(acuconf, tmpDir, fs, readers);
  }
}
/**
 * Finds a mid-point key (and possibly an earlier row-boundary candidate) from the index
 * entries of the given files, for use when splitting a tablet. The returned map keys
 * are the approximate fraction of entries preceding each candidate; 0.5 maps to the
 * mid-point itself.
 *
 * @param mapFiles
 *          - list MapFiles to find the mid point key
 *
 *          ISSUES : This method used the index files to find the mid point. If the map files have different index intervals this method will not return an
 *          accurate mid point. Also, it would be tricky to use this method in conjunction with an in memory map because the indexing interval is unknown.
 */
public static SortedMap<Double,Key> findMidPoint(FileSystem fs, AccumuloConfiguration acuConf, Text prevEndRow, Text endRow, Collection<String> mapFiles,
    double minSplit, boolean useIndex) throws IOException {
  Configuration conf = CachedConfiguration.getInstance();
  // keep the originals in case we must fall back to scanning the data files
  Collection<String> origMapFiles = mapFiles;
  String tmpDir = null;
  int maxToOpen = acuConf.getCount(Property.TSERV_TABLET_SPLIT_FINDMIDPOINT_MAXOPEN);
  ArrayList<FileSKVIterator> readers = new ArrayList<FileSKVIterator>(mapFiles.size());
  try {
    if (mapFiles.size() > maxToOpen) {
      if (!useIndex)
        throw new IOException("Cannot find mid point using data files, too many " + mapFiles.size());
      // merge the indexes down to at most maxToOpen intermediate files
      tmpDir = createTmpDir(acuConf, fs);
      log.debug("Too many indexes (" + mapFiles.size() + ") to open at once for " + endRow + " " + prevEndRow + ", reducing in tmpDir = " + tmpDir);
      long t1 = System.currentTimeMillis();
      mapFiles = reduceFiles(acuConf, conf, fs, prevEndRow, endRow, mapFiles, maxToOpen, tmpDir, 0);
      long t2 = System.currentTimeMillis();
      log.debug("Finished reducing indexes for " + endRow + " " + prevEndRow + " in " + String.format("%6.2f secs", (t2 - t1) / 1000.0));
    }
    if (prevEndRow == null)
      prevEndRow = new Text();
    long t1 = System.currentTimeMillis();
    long numKeys = 0;
    // when tmpDir != null the files are reduced index files, so never treat them as indexes again
    numKeys = countIndexEntries(acuConf, prevEndRow, endRow, mapFiles, tmpDir == null ? useIndex : false, conf, fs, readers);
    if (numKeys == 0) {
      if (useIndex) {
        log.warn("Failed to find mid point using indexes, falling back to data files which is slower. No entries between " + prevEndRow + " and " + endRow
            + " for " + mapFiles);
        // need to pass original map files, not possibly reduced indexes
        return findMidPoint(fs, acuConf, prevEndRow, endRow, origMapFiles, minSplit, false);
      }
      throw new IOException("Failed to find mid point, no entries between " + prevEndRow + " and " + endRow + " for " + mapFiles);
    }
    List<SortedKeyValueIterator<Key,Value>> iters = new ArrayList<SortedKeyValueIterator<Key,Value>>(readers);
    MultiIterator mmfi = new MultiIterator(iters, true);
    // skip the prevendrow
    while (mmfi.hasTop() && mmfi.getTopKey().compareRow(prevEndRow) <= 0)
      mmfi.next();
    // read half of the keys in the index
    TreeMap<Double,Key> ret = new TreeMap<Double,Key>();
    Key lastKey = null;
    long keysRead = 0;
    Key keyBeforeMidPoint = null;
    long keyBeforeMidPointPosition = 0;
    while (keysRead < numKeys / 2) {
      // remember the last key seen before a row change, once past the minSplit fraction
      if (lastKey != null && !lastKey.equals(mmfi.getTopKey(), PartialKey.ROW) && (keysRead - 1) / (double) numKeys >= minSplit) {
        keyBeforeMidPoint = new Key(lastKey);
        keyBeforeMidPointPosition = keysRead - 1;
      }
      if (lastKey == null)
        lastKey = new Key();
      lastKey.set(mmfi.getTopKey());
      keysRead++;
      // consume minimum
      mmfi.next();
    }
    if (keyBeforeMidPoint != null)
      ret.put(keyBeforeMidPointPosition / (double) numKeys, keyBeforeMidPoint);
    long t2 = System.currentTimeMillis();
    log.debug(String.format("Found midPoint from indexes in %6.2f secs.%n", ((t2 - t1) / 1000.0)));
    ret.put(.5, mmfi.getTopKey());
    // sanity check
    for (Key key : ret.values()) {
      boolean inRange = (key.compareRow(prevEndRow) > 0 && (endRow == null || key.compareRow(endRow) < 1));
      if (!inRange) {
        throw new IOException("Found mid point is not in range " + key + " " + prevEndRow + " " + endRow + " " + mapFiles);
      }
    }
    return ret;
  } finally {
    cleanupIndexOp(acuConf, tmpDir, fs, readers);
  }
}
/**
 * Closes every reader in {@code readers} (logging, but not propagating, close failures)
 * and removes {@code tmpDir} if one was created for the operation. As a safety net, the
 * directory is only deleted when it really lives under the instance's /tmp area.
 */
private static void cleanupIndexOp(AccumuloConfiguration acuConf, String tmpDir, FileSystem fs, ArrayList<FileSKVIterator> readers) throws IOException {
  // close all of the index sequence files
  for (FileSKVIterator reader : readers) {
    if (reader == null)
      continue;
    try {
      reader.close();
    } catch (IOException e) {
      // keep closing the remaining readers
      log.error(e, e);
    }
  }
  if (tmpDir == null)
    return;
  String tmpPrefix = acuConf.get(Property.INSTANCE_DFS_DIR) + "/tmp";
  if (tmpDir.startsWith(tmpPrefix))
    fs.delete(new Path(tmpDir), true);
  else
    log.error("Did not delete tmp dir because it wasn't a tmp dir " + tmpDir);
}
/**
 * Counts the entries in (prevEndRow, endRow] across the given files and, as a side
 * effect, adds a freshly opened reader for each file (positioned at the beginning) to
 * {@code readers} for use by the caller.
 *
 * NOTE(review): each file is opened twice — once for counting, once for the reader
 * handed back through {@code readers}.
 */
private static long countIndexEntries(AccumuloConfiguration acuConf, Text prevEndRow, Text endRow, Collection<String> mapFiles, boolean useIndex,
    Configuration conf, FileSystem fs, ArrayList<FileSKVIterator> readers) throws IOException {
  long numKeys = 0;
  // count the total number of index entries
  for (String path : mapFiles) {
    FileSKVIterator reader = null;
    try {
      if (useIndex)
        reader = FileOperations.getInstance().openIndex(path, fs, conf, acuConf);
      else
        reader = FileOperations.getInstance().openReader(path, new Range(prevEndRow, false, null, true), LocalityGroupUtil.EMPTY_CF_SET, false, fs, conf,
            acuConf);
      while (reader.hasTop()) {
        Key key = reader.getTopKey();
        // stop once past endRow; count keys strictly after prevEndRow
        if (endRow != null && key.compareRow(endRow) > 0)
          break;
        else if (prevEndRow == null || key.compareRow(prevEndRow) > 0)
          numKeys++;
        reader.next();
      }
    } finally {
      try {
        if (reader != null)
          reader.close();
      } catch (IOException e) {
        log.error(e, e);
      }
    }
    // reopen so the caller receives a reader positioned at the start of the file
    if (useIndex)
      readers.add(FileOperations.getInstance().openIndex(path, fs, conf, acuConf));
    else
      readers.add(FileOperations.getInstance().openReader(path, new Range(prevEndRow, false, null, true), LocalityGroupUtil.EMPTY_CF_SET, false, fs, conf,
          acuConf));
  }
  return numKeys;
}
/**
 * Best-effort lookup of the first and last key of each given map file. Files that fail
 * to open or read are logged and simply omitted from the result map.
 */
public static Map<String,FileInfo> tryToGetFirstAndLastRows(FileSystem fs, AccumuloConfiguration acuConf, Set<String> mapfiles) {
  HashMap<String,FileInfo> mapFilesInfo = new HashMap<String,FileInfo>();
  Configuration conf = CachedConfiguration.getInstance();
  long t1 = System.currentTimeMillis();
  for (String mapfile : mapfiles) {
    FileSKVIterator reader = null;
    try {
      reader = FileOperations.getInstance().openReader(mapfile, false, fs, conf, acuConf);
      Key firstKey = reader.getFirstKey();
      // a null first key means the file is empty; skip it
      if (firstKey != null) {
        mapFilesInfo.put(mapfile, new FileInfo(firstKey, reader.getLastKey()));
      }
    } catch (IOException ioe) {
      log.warn("Failed to read map file to determine first and last key : " + mapfile, ioe);
    } finally {
      if (reader != null) {
        try {
          reader.close();
        } catch (IOException ioe) {
          log.warn("failed to close " + mapfile, ioe);
        }
      }
    }
  }
  long t2 = System.currentTimeMillis();
  log.debug(String.format("Found first and last keys for %d map files in %6.2f secs", mapfiles.size(), (t2 - t1) / 1000.0));
  return mapFilesInfo;
}
/**
 * Scans the given files and returns the greatest last key across all of them, or null
 * if every file is empty. Reader close failures are logged but do not abort the scan.
 */
public static WritableComparable<Key> findLastKey(FileSystem fs, AccumuloConfiguration acuConf, Collection<String> mapFiles) throws IOException {
  Configuration conf = CachedConfiguration.getInstance();
  Key max = null;
  for (String path : mapFiles) {
    FileSKVIterator reader = FileOperations.getInstance().openReader(path, true, fs, conf, acuConf);
    try {
      // an empty file contributes no last key
      if (reader.hasTop()) {
        Key candidate = reader.getLastKey();
        if (max == null || candidate.compareTo(max) > 0)
          max = candidate;
      }
    } finally {
      try {
        reader.close();
      } catch (IOException e) {
        log.error(e, e);
      }
    }
  }
  return max;
}
/** Minimal mutable long wrapper, used as a per-extent counter in estimateSizes. */
private static class MLong {
  long l;

  public MLong(long i) {
    l = i;
  }
}
/**
 * Estimates how many bytes of {@code mapFile} fall within each extent by attributing the
 * file's total size proportionally to the index entries whose row each extent contains.
 * Extents with no matching index entries are credited one entry to avoid zero estimates.
 */
public static Map<KeyExtent,Long> estimateSizes(AccumuloConfiguration acuConf, Path mapFile, long fileSize, List<KeyExtent> extents, Configuration conf,
    FileSystem fs) throws IOException {
  long totalIndexEntries = 0;
  Map<KeyExtent,MLong> counts = new TreeMap<KeyExtent,MLong>();
  for (KeyExtent keyExtent : extents)
    counts.put(keyExtent, new MLong(0));
  Text row = new Text();
  FileSKVIterator index = FileOperations.getInstance().openIndex(mapFile.toString(), fs, conf, acuConf);
  try {
    while (index.hasTop()) {
      Key key = index.getTopKey();
      totalIndexEntries++;
      key.getRow(row);
      // credit this index entry to every extent whose range contains the row
      for (Entry<KeyExtent,MLong> entry : counts.entrySet())
        if (entry.getKey().contains(row))
          entry.getValue().l++;
      index.next();
    }
  } finally {
    try {
      if (index != null)
        index.close();
    } catch (IOException e) {
      // continue with next file
      log.error(e, e);
    }
  }
  Map<KeyExtent,Long> results = new TreeMap<KeyExtent,Long>();
  for (KeyExtent keyExtent : extents) {
    double numEntries = counts.get(keyExtent).l;
    // avoid a zero estimate for extents that matched no index entries
    if (numEntries == 0)
      numEntries = 1;
    long estSize = (long) ((numEntries / totalIndexEntries) * fileSize);
    results.put(keyExtent, estSize);
  }
  return results;
}
/**
 * Resolves the FileSystem to use: the one named by INSTANCE_DFS_URI when configured,
 * otherwise the default FileSystem for the Hadoop configuration.
 */
public static FileSystem getFileSystem(Configuration conf, AccumuloConfiguration acuconf) throws IOException {
  String uri = acuconf.get(Property.INSTANCE_DFS_URI);
  if ("".equals(uri))
    return FileSystem.get(conf);
  try {
    return FileSystem.get(new URI(uri), conf);
  } catch (URISyntaxException e) {
    // surface a malformed configured URI as an IOException, preserving the cause
    throw new IOException(e);
  }
}
}
| |
/**
* <copyright>
*
* Copyright (c) 2010 SAP AG.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Reiner Hille-Doering (SAP AG) - initial API and implementation and/or initial documentation
*
* </copyright>
*/
package org.eclipse.bpmn2.impl;
import java.util.Collection;
import java.util.List;
import org.eclipse.bpmn2.Bpmn2Package;
import org.eclipse.bpmn2.CatchEvent;
import org.eclipse.bpmn2.DataOutput;
import org.eclipse.bpmn2.DataOutputAssociation;
import org.eclipse.bpmn2.EventDefinition;
import org.eclipse.bpmn2.OutputSet;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.util.EObjectContainmentEList;
import org.eclipse.emf.ecore.util.EObjectResolvingEList;
import org.eclipse.emf.ecore.util.InternalEList;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>Catch Event</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* <ul>
* <li>{@link org.eclipse.bpmn2.impl.CatchEventImpl#getDataOutputs <em>Data Outputs</em>}</li>
* <li>{@link org.eclipse.bpmn2.impl.CatchEventImpl#getDataOutputAssociation <em>Data Output Association</em>}</li>
* <li>{@link org.eclipse.bpmn2.impl.CatchEventImpl#getOutputSet <em>Output Set</em>}</li>
* <li>{@link org.eclipse.bpmn2.impl.CatchEventImpl#getEventDefinitions <em>Event Definitions</em>}</li>
* <li>{@link org.eclipse.bpmn2.impl.CatchEventImpl#getEventDefinitionRefs <em>Event Definition Refs</em>}</li>
* <li>{@link org.eclipse.bpmn2.impl.CatchEventImpl#isParallelMultiple <em>Parallel Multiple</em>}</li>
* </ul>
* </p>
*
* @generated
*/
public abstract class CatchEventImpl extends EventImpl implements CatchEvent {
    /**
     * The cached value of the '{@link #getDataOutputs() <em>Data Outputs</em>}' containment reference list.
     * <!-- begin-user-doc -->
     * Lazily created by {@link #getDataOutputs()}; {@code null} until first accessed.
     * <!-- end-user-doc -->
     * @see #getDataOutputs()
     * @generated
     * @ordered
     */
    protected EList<DataOutput> dataOutputs;
    /**
     * The cached value of the '{@link #getDataOutputAssociation() <em>Data Output Association</em>}' containment reference list.
     * <!-- begin-user-doc -->
     * Lazily created by {@link #getDataOutputAssociation()}; {@code null} until first accessed.
     * <!-- end-user-doc -->
     * @see #getDataOutputAssociation()
     * @generated
     * @ordered
     */
    protected EList<DataOutputAssociation> dataOutputAssociation;
    /**
     * The cached value of the '{@link #getOutputSet() <em>Output Set</em>}' containment reference.
     * <!-- begin-user-doc -->
     * Single containment reference; ownership transfer is handled in {@link #setOutputSet(OutputSet)}.
     * <!-- end-user-doc -->
     * @see #getOutputSet()
     * @generated
     * @ordered
     */
    protected OutputSet outputSet;
    /**
     * The cached value of the '{@link #getEventDefinitions() <em>Event Definitions</em>}' containment reference list.
     * <!-- begin-user-doc -->
     * Lazily created by {@link #getEventDefinitions()}; {@code null} until first accessed.
     * <!-- end-user-doc -->
     * @see #getEventDefinitions()
     * @generated
     * @ordered
     */
    protected EList<EventDefinition> eventDefinitions;
    /**
     * The cached value of the '{@link #getEventDefinitionRefs() <em>Event Definition Refs</em>}' reference list.
     * <!-- begin-user-doc -->
     * Non-containment (cross-document) references; proxies are resolved on access.
     * <!-- end-user-doc -->
     * @see #getEventDefinitionRefs()
     * @generated
     * @ordered
     */
    protected EList<EventDefinition> eventDefinitionRefs;
    /**
     * The default value of the '{@link #isParallelMultiple() <em>Parallel Multiple</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #isParallelMultiple()
     * @generated
     * @ordered
     */
    protected static final boolean PARALLEL_MULTIPLE_EDEFAULT = false;
    /**
     * The cached value of the '{@link #isParallelMultiple() <em>Parallel Multiple</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #isParallelMultiple()
     * @generated
     * @ordered
     */
    protected boolean parallelMultiple = PARALLEL_MULTIPLE_EDEFAULT;
    /**
     * <!-- begin-user-doc -->
     * Protected: instances are created only through the Bpmn2 factory for concrete subclasses.
     * <!-- end-user-doc -->
     * @generated
     */
    protected CatchEventImpl() {
        super();
    }
    /**
     * <!-- begin-user-doc -->
     * Returns the static EMF metaclass for this model object.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected EClass eStaticClass() {
        return Bpmn2Package.Literals.CATCH_EVENT;
    }
    /**
     * <!-- begin-user-doc -->
     * Returns the live containment list of data outputs, creating it on first call.
     * Mutations on the returned list are reflected in the model and fire notifications.
     * <!-- end-user-doc -->
     * @generated
     */
    public List<DataOutput> getDataOutputs() {
        if (dataOutputs == null) {
            dataOutputs = new EObjectContainmentEList<DataOutput>(DataOutput.class, this,
                    Bpmn2Package.CATCH_EVENT__DATA_OUTPUTS);
        }
        return dataOutputs;
    }
    /**
     * <!-- begin-user-doc -->
     * Returns the live containment list of data output associations, creating it on first call.
     * <!-- end-user-doc -->
     * @generated
     */
    public List<DataOutputAssociation> getDataOutputAssociation() {
        if (dataOutputAssociation == null) {
            dataOutputAssociation = new EObjectContainmentEList<DataOutputAssociation>(
                    DataOutputAssociation.class, this,
                    Bpmn2Package.CATCH_EVENT__DATA_OUTPUT_ASSOCIATION);
        }
        return dataOutputAssociation;
    }
    /**
     * <!-- begin-user-doc -->
     * Returns the current output set, or {@code null} if none has been assigned.
     * <!-- end-user-doc -->
     * @generated
     */
    public OutputSet getOutputSet() {
        return outputSet;
    }
    /**
     * <!-- begin-user-doc -->
     * Low-level setter used by the EMF framework: swaps the reference and chains a SET
     * notification onto {@code msgs} without performing inverse-reference bookkeeping.
     * <!-- end-user-doc -->
     * @generated
     */
    public NotificationChain basicSetOutputSet(OutputSet newOutputSet, NotificationChain msgs) {
        OutputSet oldOutputSet = outputSet;
        outputSet = newOutputSet;
        if (eNotificationRequired()) {
            ENotificationImpl notification = new ENotificationImpl(this, Notification.SET,
                    Bpmn2Package.CATCH_EVENT__OUTPUT_SET, oldOutputSet, newOutputSet);
            if (msgs == null)
                msgs = notification;
            else
                msgs.add(notification);
        }
        return msgs;
    }
    /**
     * <!-- begin-user-doc -->
     * Public setter: detaches the old contained output set, attaches the new one, then
     * dispatches the accumulated notification chain. A same-value call still emits a
     * SET touch notification when notification is required.
     * <!-- end-user-doc -->
     * @generated
     */
    public void setOutputSet(OutputSet newOutputSet) {
        if (newOutputSet != outputSet) {
            NotificationChain msgs = null;
            if (outputSet != null)
                msgs = ((InternalEObject) outputSet).eInverseRemove(this, EOPPOSITE_FEATURE_BASE
                        - Bpmn2Package.CATCH_EVENT__OUTPUT_SET, null, msgs);
            if (newOutputSet != null)
                msgs = ((InternalEObject) newOutputSet).eInverseAdd(this, EOPPOSITE_FEATURE_BASE
                        - Bpmn2Package.CATCH_EVENT__OUTPUT_SET, null, msgs);
            msgs = basicSetOutputSet(newOutputSet, msgs);
            if (msgs != null)
                msgs.dispatch();
        } else if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET,
                    Bpmn2Package.CATCH_EVENT__OUTPUT_SET, newOutputSet, newOutputSet));
    }
    /**
     * <!-- begin-user-doc -->
     * Returns the live containment list of event definitions, creating it on first call.
     * <!-- end-user-doc -->
     * @generated
     */
    public List<EventDefinition> getEventDefinitions() {
        if (eventDefinitions == null) {
            eventDefinitions = new EObjectContainmentEList<EventDefinition>(EventDefinition.class,
                    this, Bpmn2Package.CATCH_EVENT__EVENT_DEFINITIONS);
        }
        return eventDefinitions;
    }
    /**
     * <!-- begin-user-doc -->
     * Returns the live list of event definition references (non-containment, proxy-resolving),
     * creating it on first call.
     * <!-- end-user-doc -->
     * @generated
     */
    public List<EventDefinition> getEventDefinitionRefs() {
        if (eventDefinitionRefs == null) {
            eventDefinitionRefs = new EObjectResolvingEList<EventDefinition>(EventDefinition.class,
                    this, Bpmn2Package.CATCH_EVENT__EVENT_DEFINITION_REFS);
        }
        return eventDefinitionRefs;
    }
    /**
     * <!-- begin-user-doc -->
     * Returns whether multiple event triggers are required in parallel (defaults to {@code false}).
     * <!-- end-user-doc -->
     * @generated
     */
    public boolean isParallelMultiple() {
        return parallelMultiple;
    }
    /**
     * <!-- begin-user-doc -->
     * Sets the parallel-multiple flag and fires a SET notification when required.
     * <!-- end-user-doc -->
     * @generated
     */
    public void setParallelMultiple(boolean newParallelMultiple) {
        boolean oldParallelMultiple = parallelMultiple;
        parallelMultiple = newParallelMultiple;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET,
                    Bpmn2Package.CATCH_EVENT__PARALLEL_MULTIPLE, oldParallelMultiple,
                    parallelMultiple));
    }
    /**
     * <!-- begin-user-doc -->
     * Framework hook: removes {@code otherEnd} from the containment feature identified by
     * {@code featureID}, delegating unknown features to the superclass.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID,
            NotificationChain msgs) {
        switch (featureID) {
        case Bpmn2Package.CATCH_EVENT__DATA_OUTPUTS:
            return ((InternalEList<?>) getDataOutputs()).basicRemove(otherEnd, msgs);
        case Bpmn2Package.CATCH_EVENT__DATA_OUTPUT_ASSOCIATION:
            return ((InternalEList<?>) getDataOutputAssociation()).basicRemove(otherEnd, msgs);
        case Bpmn2Package.CATCH_EVENT__OUTPUT_SET:
            return basicSetOutputSet(null, msgs);
        case Bpmn2Package.CATCH_EVENT__EVENT_DEFINITIONS:
            return ((InternalEList<?>) getEventDefinitions()).basicRemove(otherEnd, msgs);
        }
        return super.eInverseRemove(otherEnd, featureID, msgs);
    }
    /**
     * <!-- begin-user-doc -->
     * Reflective getter: dispatches on {@code featureID} to the matching accessor,
     * delegating unknown features to the superclass.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        switch (featureID) {
        case Bpmn2Package.CATCH_EVENT__DATA_OUTPUTS:
            return getDataOutputs();
        case Bpmn2Package.CATCH_EVENT__DATA_OUTPUT_ASSOCIATION:
            return getDataOutputAssociation();
        case Bpmn2Package.CATCH_EVENT__OUTPUT_SET:
            return getOutputSet();
        case Bpmn2Package.CATCH_EVENT__EVENT_DEFINITIONS:
            return getEventDefinitions();
        case Bpmn2Package.CATCH_EVENT__EVENT_DEFINITION_REFS:
            return getEventDefinitionRefs();
        case Bpmn2Package.CATCH_EVENT__PARALLEL_MULTIPLE:
            return isParallelMultiple();
        }
        return super.eGet(featureID, resolve, coreType);
    }
    /**
     * <!-- begin-user-doc -->
     * Reflective setter: list features are cleared then bulk-filled from the supplied
     * collection; scalar features are set directly. Unknown features go to the superclass.
     * <!-- end-user-doc -->
     * @generated
     */
    @SuppressWarnings("unchecked")
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
        case Bpmn2Package.CATCH_EVENT__DATA_OUTPUTS:
            getDataOutputs().clear();
            getDataOutputs().addAll((Collection<? extends DataOutput>) newValue);
            return;
        case Bpmn2Package.CATCH_EVENT__DATA_OUTPUT_ASSOCIATION:
            getDataOutputAssociation().clear();
            getDataOutputAssociation().addAll(
                    (Collection<? extends DataOutputAssociation>) newValue);
            return;
        case Bpmn2Package.CATCH_EVENT__OUTPUT_SET:
            setOutputSet((OutputSet) newValue);
            return;
        case Bpmn2Package.CATCH_EVENT__EVENT_DEFINITIONS:
            getEventDefinitions().clear();
            getEventDefinitions().addAll((Collection<? extends EventDefinition>) newValue);
            return;
        case Bpmn2Package.CATCH_EVENT__EVENT_DEFINITION_REFS:
            getEventDefinitionRefs().clear();
            getEventDefinitionRefs().addAll((Collection<? extends EventDefinition>) newValue);
            return;
        case Bpmn2Package.CATCH_EVENT__PARALLEL_MULTIPLE:
            setParallelMultiple((Boolean) newValue);
            return;
        }
        super.eSet(featureID, newValue);
    }
    /**
     * <!-- begin-user-doc -->
     * Reflective unsetter: restores each feature to its default (empty list, {@code null},
     * or {@link #PARALLEL_MULTIPLE_EDEFAULT}). Unknown features go to the superclass.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        switch (featureID) {
        case Bpmn2Package.CATCH_EVENT__DATA_OUTPUTS:
            getDataOutputs().clear();
            return;
        case Bpmn2Package.CATCH_EVENT__DATA_OUTPUT_ASSOCIATION:
            getDataOutputAssociation().clear();
            return;
        case Bpmn2Package.CATCH_EVENT__OUTPUT_SET:
            setOutputSet((OutputSet) null);
            return;
        case Bpmn2Package.CATCH_EVENT__EVENT_DEFINITIONS:
            getEventDefinitions().clear();
            return;
        case Bpmn2Package.CATCH_EVENT__EVENT_DEFINITION_REFS:
            getEventDefinitionRefs().clear();
            return;
        case Bpmn2Package.CATCH_EVENT__PARALLEL_MULTIPLE:
            setParallelMultiple(PARALLEL_MULTIPLE_EDEFAULT);
            return;
        }
        super.eUnset(featureID);
    }
    /**
     * <!-- begin-user-doc -->
     * Reflective "is set" check: reads the cached fields directly (without triggering
     * lazy list creation) and compares against each feature's default.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
        case Bpmn2Package.CATCH_EVENT__DATA_OUTPUTS:
            return dataOutputs != null && !dataOutputs.isEmpty();
        case Bpmn2Package.CATCH_EVENT__DATA_OUTPUT_ASSOCIATION:
            return dataOutputAssociation != null && !dataOutputAssociation.isEmpty();
        case Bpmn2Package.CATCH_EVENT__OUTPUT_SET:
            return outputSet != null;
        case Bpmn2Package.CATCH_EVENT__EVENT_DEFINITIONS:
            return eventDefinitions != null && !eventDefinitions.isEmpty();
        case Bpmn2Package.CATCH_EVENT__EVENT_DEFINITION_REFS:
            return eventDefinitionRefs != null && !eventDefinitionRefs.isEmpty();
        case Bpmn2Package.CATCH_EVENT__PARALLEL_MULTIPLE:
            return parallelMultiple != PARALLEL_MULTIPLE_EDEFAULT;
        }
        return super.eIsSet(featureID);
    }
    /**
     * <!-- begin-user-doc -->
     * Appends the attribute values to the superclass description; proxies fall back to
     * the plain superclass string.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public String toString() {
        if (eIsProxy())
            return super.toString();
        StringBuffer result = new StringBuffer(super.toString());
        result.append(" (parallelMultiple: ");
        result.append(parallelMultiple);
        result.append(')');
        return result.toString();
    }
} //CatchEventImpl
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.index.engine;
import com.carrotsearch.hppc.ObjectIntHashMap;
import org.apache.logging.log4j.Logger;
import org.apache.lucene.index.IndexCommit;
import org.apache.lucene.index.IndexDeletionPolicy;
import org.apache.lucene.index.SegmentInfos;
import org.elasticsearch.common.lucene.FilterIndexCommit;
import org.elasticsearch.index.seqno.SequenceNumbers;
import org.elasticsearch.index.translog.Translog;
import org.elasticsearch.index.translog.TranslogDeletionPolicy;
import java.io.IOException;
import java.nio.file.Path;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.function.LongSupplier;
/**
* An {@link IndexDeletionPolicy} that coordinates between Lucene's commits and the retention of translog generation files,
* making sure that all translog files that are needed to recover from the Lucene commit are not deleted.
* <p>
* In particular, this policy will delete index commits whose max sequence number is at most
* the current global checkpoint except the index commit which has the highest max sequence number among those.
*/
public class CombinedDeletionPolicy extends IndexDeletionPolicy {
    private final Logger logger;
    private final TranslogDeletionPolicy translogDeletionPolicy;
    private final SoftDeletesPolicy softDeletesPolicy;
    private final LongSupplier globalCheckpointSupplier;
    private final ObjectIntHashMap<IndexCommit> snapshottedCommits; // Number of snapshots held against each commit point.
    private volatile IndexCommit safeCommit; // the most recent safe commit point - its max_seqno at most the persisted global checkpoint.
    private volatile long maxSeqNoOfNextSafeCommit;
    private volatile IndexCommit lastCommit; // the most recent commit point
    private volatile SafeCommitInfo safeCommitInfo = SafeCommitInfo.EMPTY;

    CombinedDeletionPolicy(Logger logger, TranslogDeletionPolicy translogDeletionPolicy,
                           SoftDeletesPolicy softDeletesPolicy, LongSupplier globalCheckpointSupplier) {
        this.logger = logger;
        this.translogDeletionPolicy = translogDeletionPolicy;
        this.softDeletesPolicy = softDeletesPolicy;
        this.globalCheckpointSupplier = globalCheckpointSupplier;
        this.snapshottedCommits = new ObjectIntHashMap<>();
    }

    /**
     * Called by Lucene when the index is first opened. Runs a normal {@link #onCommit(List)} pass and
     * then verifies that the surviving last commit is the safe commit; a freshly opened engine must not
     * start from an unsafe commit.
     *
     * @throws IllegalStateException if the last commit is not the safe commit
     */
    @Override
    public void onInit(List<? extends IndexCommit> commits) throws IOException {
        assert commits.isEmpty() == false : "index is opened, but we have no commits";
        onCommit(commits);
        if (safeCommit != commits.get(commits.size() - 1)) {
            throw new IllegalStateException("Engine is opened, but the last commit isn't safe. Global checkpoint ["
                + globalCheckpointSupplier.getAsLong() + "], seqNos in last commit ["
                + SequenceNumbers.loadSeqNoInfoFromLuceneCommit(lastCommit.getUserData().entrySet()) + "], "
                + "seqNos in safe commit [" + SequenceNumbers.loadSeqNoInfoFromLuceneCommit(safeCommit.getUserData().entrySet()) + "]");
        }
    }

    /**
     * Called by Lucene after each commit. Determines the safe commit from the current global checkpoint,
     * deletes every older commit that is not pinned by a snapshot, and refreshes the translog/soft-deletes
     * retention policies. The (potentially expensive) doc-count read for {@link SafeCommitInfo} is done
     * outside the synchronized block so it cannot stall concurrent acquire/release calls.
     */
    @Override
    public void onCommit(List<? extends IndexCommit> commits) throws IOException {
        final IndexCommit safeCommit;
        synchronized (this) {
            final int keptPosition = indexOfKeptCommits(commits, globalCheckpointSupplier.getAsLong());
            this.safeCommitInfo = SafeCommitInfo.EMPTY;
            this.lastCommit = commits.get(commits.size() - 1);
            this.safeCommit = commits.get(keptPosition);
            // Delete every commit older than the safe commit unless a snapshot still references it.
            for (int i = 0; i < keptPosition; i++) {
                if (snapshottedCommits.containsKey(commits.get(i)) == false) {
                    deleteCommit(commits.get(i));
                }
            }
            updateRetentionPolicy();
            if (keptPosition == commits.size() - 1) {
                this.maxSeqNoOfNextSafeCommit = Long.MAX_VALUE;
            } else {
                this.maxSeqNoOfNextSafeCommit = Long.parseLong(commits.get(keptPosition + 1).getUserData().get(SequenceNumbers.MAX_SEQ_NO));
            }
            safeCommit = this.safeCommit;
        }
        assert Thread.holdsLock(this) == false : "should not block concurrent acquire or release";
        safeCommitInfo = new SafeCommitInfo(Long.parseLong(
            safeCommit.getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)), getDocCountOfCommit(safeCommit));
        // This is protected from concurrent calls by a lock on the IndexWriter, but this assertion makes sure that we notice if that ceases
        // to be true in future. It is not disastrous if safeCommitInfo refers to an older safeCommit, it just means that we might retain a
        // bit more history and do a few more ops-based recoveries than we would otherwise.
        final IndexCommit newSafeCommit = this.safeCommit;
        assert safeCommit == newSafeCommit
            : "onCommit called concurrently? " + safeCommit.getGeneration() + " vs " + newSafeCommit.getGeneration();
    }

    /** Deletes a single index commit and asserts the deletion actually took effect. */
    private void deleteCommit(IndexCommit commit) throws IOException {
        assert commit.isDeleted() == false : "Index commit [" + commitDescription(commit) + "] is deleted twice";
        logger.debug("Delete index commit [{}]", commitDescription(commit));
        commit.delete();
        assert commit.isDeleted() : "Deletion commit [" + commitDescription(commit) + "] was suppressed";
    }

    /**
     * Propagates the safe commit's local checkpoint to the translog and soft-deletes retention policies
     * so that operations still needed for recovery from the safe commit are kept. Caller must hold the
     * monitor (the fields read here are updated under it).
     */
    private void updateRetentionPolicy() throws IOException {
        assert Thread.holdsLock(this);
        logger.debug("Safe commit [{}], last commit [{}]", commitDescription(safeCommit), commitDescription(lastCommit));
        assert safeCommit.isDeleted() == false : "The safe commit must not be deleted";
        assert lastCommit.isDeleted() == false : "The last commit must not be deleted";
        final long localCheckpointOfSafeCommit = Long.parseLong(safeCommit.getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY));
        softDeletesPolicy.setLocalCheckpointOfSafeCommit(localCheckpointOfSafeCommit);
        translogDeletionPolicy.setLocalCheckpointOfSafeCommit(localCheckpointOfSafeCommit);
    }

    /** Reads the total max doc count of a commit; overridable for tests. */
    protected int getDocCountOfCommit(IndexCommit indexCommit) throws IOException {
        return SegmentInfos.readCommit(indexCommit.getDirectory(), indexCommit.getSegmentsFileName()).totalMaxDoc();
    }

    SafeCommitInfo getSafeCommitInfo() {
        return safeCommitInfo;
    }

    /**
     * Captures the most recent commit point {@link #lastCommit} or the most recent safe commit point {@link #safeCommit}.
     * Index files of the capturing commit point won't be released until the commit reference is closed.
     *
     * @param acquiringSafeCommit captures the most recent safe commit point if true; otherwise captures the most recent commit point.
     */
    synchronized IndexCommit acquireIndexCommit(boolean acquiringSafeCommit) {
        assert safeCommit != null : "Safe commit is not initialized yet";
        assert lastCommit != null : "Last commit is not initialized yet";
        final IndexCommit snapshotting = acquiringSafeCommit ? safeCommit : lastCommit;
        snapshottedCommits.addTo(snapshotting, 1); // increase refCount
        return new SnapshotIndexCommit(snapshotting);
    }

    /**
     * Releases an index commit that acquired by {@link #acquireIndexCommit(boolean)}.
     *
     * @return true if the snapshotting commit can be clean up.
     */
    synchronized boolean releaseCommit(final IndexCommit snapshotCommit) {
        final IndexCommit releasingCommit = ((SnapshotIndexCommit) snapshotCommit).getIndexCommit();
        assert snapshottedCommits.containsKey(releasingCommit) : "Release non-snapshotted commit;" +
            "snapshotted commits [" + snapshottedCommits + "], releasing commit [" + releasingCommit + "]";
        final int refCount = snapshottedCommits.addTo(releasingCommit, -1); // release refCount
        assert refCount >= 0 : "Number of snapshots can not be negative [" + refCount + "]";
        if (refCount == 0) {
            snapshottedCommits.remove(releasingCommit);
        }
        // The commit can be clean up only if no pending snapshot and it is neither the safe commit nor last commit.
        return refCount == 0 && releasingCommit.equals(safeCommit) == false && releasingCommit.equals(lastCommit) == false;
    }

    /**
     * Find a safe commit point from a list of existing commits based on the supplied global checkpoint.
     * The max sequence number of a safe commit point should be at most the global checkpoint.
     * If an index was created before 6.2 or recovered from remote, we might not have a safe commit.
     * In this case, this method will return the oldest index commit.
     *
     * @param commits a list of existing commit points
     * @param globalCheckpoint the persisted global checkpoint from the translog, see {@link Translog#readGlobalCheckpoint(Path, String)}
     * @return a safe commit or the oldest commit if a safe commit is not found
     * @throws IllegalArgumentException if the commit list is empty
     */
    public static IndexCommit findSafeCommitPoint(List<IndexCommit> commits, long globalCheckpoint) throws IOException {
        if (commits.isEmpty()) {
            throw new IllegalArgumentException("Commit list must not be empty");
        }
        final int keptPosition = indexOfKeptCommits(commits, globalCheckpoint);
        return commits.get(keptPosition);
    }

    /**
     * Find the highest index position of a safe index commit whose max sequence number is not greater than the global checkpoint.
     * Index commits with different translog UUID will be filtered out as they don't belong to this engine.
     */
    private static int indexOfKeptCommits(List<? extends IndexCommit> commits, long globalCheckpoint) throws IOException {
        final String expectedTranslogUUID = commits.get(commits.size() - 1).getUserData().get(Translog.TRANSLOG_UUID_KEY);
        // Commits are sorted by age (the 0th one is the oldest commit).
        for (int i = commits.size() - 1; i >= 0; i--) {
            final Map<String, String> commitUserData = commits.get(i).getUserData();
            // Ignore index commits with different translog uuid.
            if (expectedTranslogUUID.equals(commitUserData.get(Translog.TRANSLOG_UUID_KEY)) == false) {
                return i + 1;
            }
            final long maxSeqNoFromCommit = Long.parseLong(commitUserData.get(SequenceNumbers.MAX_SEQ_NO));
            if (maxSeqNoFromCommit <= globalCheckpoint) {
                return i;
            }
        }
        // If an index was created before 6.2 or recovered from remote, we might not have a safe commit.
        // In this case, we return the oldest index commit instead.
        return 0;
    }

    /**
     * Checks whether the deletion policy is holding on to snapshotted commits
     */
    synchronized boolean hasSnapshottedCommits() {
        return snapshottedCommits.isEmpty() == false;
    }

    /**
     * Checks if the deletion policy can delete some index commits with the latest global checkpoint.
     */
    boolean hasUnreferencedCommits() {
        return maxSeqNoOfNextSafeCommit <= globalCheckpointSupplier.getAsLong();
    }

    /**
     * Returns a description for a given {@link IndexCommit}. This should be only used for logging and debugging.
     */
    public static String commitDescription(IndexCommit commit) throws IOException {
        return String.format(Locale.ROOT, "CommitPoint{segment[%s], userData[%s]}", commit.getSegmentsFileName(), commit.getUserData());
    }

    /**
     * A wrapper of an index commit that prevents it from being deleted.
     */
    private static class SnapshotIndexCommit extends FilterIndexCommit {
        SnapshotIndexCommit(IndexCommit delegate) {
            super(delegate);
        }

        @Override
        public void delete() {
            throw new UnsupportedOperationException("A snapshot commit does not support deletion");
        }
    }
}
| |
/*
* Licensed to Apereo under one or more contributor license
* agreements. See the NOTICE file distributed with this work
* for additional information regarding copyright ownership.
* Apereo licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a
* copy of the License at the following location:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.jasig.cas.support.saml.util;
import org.jdom.Document;
import org.jdom.input.DOMBuilder;
import org.jdom.input.SAXBuilder;
import org.jdom.output.XMLOutputter;
import org.opensaml.Configuration;
import org.opensaml.DefaultBootstrap;
import org.opensaml.common.SAMLObject;
import org.opensaml.common.SAMLObjectBuilder;
import org.opensaml.common.impl.SecureRandomIdentifierGenerator;
import org.opensaml.common.xml.SAMLConstants;
import org.opensaml.xml.ConfigurationException;
import org.opensaml.xml.XMLObject;
import org.opensaml.xml.io.Marshaller;
import org.opensaml.xml.io.MarshallerFactory;
import org.opensaml.xml.schema.XSString;
import org.opensaml.xml.schema.impl.XSStringBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import javax.xml.crypto.dsig.CanonicalizationMethod;
import javax.xml.crypto.dsig.DigestMethod;
import javax.xml.crypto.dsig.Reference;
import javax.xml.crypto.dsig.SignatureMethod;
import javax.xml.crypto.dsig.SignedInfo;
import javax.xml.crypto.dsig.Transform;
import javax.xml.crypto.dsig.XMLSignature;
import javax.xml.crypto.dsig.XMLSignatureFactory;
import javax.xml.crypto.dsig.dom.DOMSignContext;
import javax.xml.crypto.dsig.keyinfo.KeyInfo;
import javax.xml.crypto.dsig.keyinfo.KeyInfoFactory;
import javax.xml.crypto.dsig.keyinfo.KeyValue;
import javax.xml.crypto.dsig.spec.C14NMethodParameterSpec;
import javax.xml.crypto.dsig.spec.TransformParameterSpec;
import javax.xml.namespace.QName;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.transform.OutputKeys;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import java.io.ByteArrayInputStream;
import java.io.StringWriter;
import java.lang.reflect.Field;
import java.nio.charset.Charset;
import java.security.PrivateKey;
import java.security.Provider;
import java.security.PublicKey;
import java.security.interfaces.DSAPublicKey;
import java.security.interfaces.RSAPublicKey;
import java.util.Collections;
import java.util.List;
/**
* An abstract builder to serve as the template handler
* for SAML1 and SAML2 responses.
*
* @author Misagh Moayyed mmoayyed@unicon.net
* @since 4.1
*/
public abstract class AbstractSamlObjectBuilder {
    /**
     * The constant DEFAULT_ELEMENT_NAME_FIELD.
     */
    protected static final String DEFAULT_ELEMENT_NAME_FIELD = "DEFAULT_ELEMENT_NAME";

    /**
     * The constant DEFAULT_ELEMENT_LOCAL_NAME_FIELD.
     */
    protected static final String DEFAULT_ELEMENT_LOCAL_NAME_FIELD = "DEFAULT_ELEMENT_LOCAL_NAME";

    /** Logger instance. **/
    protected final Logger logger = LoggerFactory.getLogger(this.getClass());

    /**
     * Initialize and bootstrap opensaml.
     * Check for prior OpenSAML initialization to prevent double init
     * that would overwrite existing OpenSAML configuration.
     */
    static {
        try {
            if (Configuration.getParserPool() == null) {
                DefaultBootstrap.bootstrap();
            }
        } catch (final ConfigurationException e) {
            throw new IllegalStateException("Error initializing OpenSAML library.", e);
        }
    }

    /**
     * Create a new SAML object, looking up its builder by the type's
     * {@code DEFAULT_ELEMENT_NAME} QName.
     *
     * @param <T> the generic type
     * @param objectType the object type
     * @return a new instance of the requested SAML object type
     * @throws IllegalStateException if no builder is registered for the type
     */
    public final <T extends SAMLObject> T newSamlObject(final Class<T> objectType) {
        final QName qName = getSamlObjectQName(objectType);
        // Cast is safe by OpenSAML convention: the builder registered for a type's
        // DEFAULT_ELEMENT_NAME builds instances of that type.
        @SuppressWarnings("unchecked")
        final SAMLObjectBuilder<T> builder = (SAMLObjectBuilder<T>) Configuration.getBuilderFactory().getBuilder(qName);
        if (builder == null) {
            throw new IllegalStateException("No SAMLObjectBuilder registered for class " + objectType.getName());
        }
        return objectType.cast(builder.buildObject(qName));
    }

    /**
     * Gets saml object QName by reflectively reading the type's static
     * {@code DEFAULT_ELEMENT_NAME} field.
     *
     * @param objectType the object type
     * @return the saml object QName
     * @throws IllegalStateException if the field is missing or inaccessible
     */
    public QName getSamlObjectQName(final Class objectType) throws RuntimeException {
        try {
            final Field f = objectType.getField(DEFAULT_ELEMENT_NAME_FIELD);
            return (QName) f.get(null);
        } catch (final NoSuchFieldException e) {
            // Preserve the cause so the reflective failure remains diagnosable.
            throw new IllegalStateException("Cannot find field " + objectType.getName() + "." + DEFAULT_ELEMENT_NAME_FIELD, e);
        } catch (final IllegalAccessException e) {
            throw new IllegalStateException("Cannot access field " + objectType.getName() + "." + DEFAULT_ELEMENT_NAME_FIELD, e);
        }
    }

    /**
     * Build the saml object based on its QName.
     *
     * @param objectType the object
     * @param qName the QName
     * @param <T> the object type
     * @return the saml object
     * @throws IllegalStateException if no builder is registered for the QName
     */
    private <T extends SAMLObject> T newSamlObject(final Class<T> objectType, final QName qName) {
        // Cast is safe by OpenSAML convention (see newSamlObject(Class)).
        @SuppressWarnings("unchecked")
        final SAMLObjectBuilder<T> builder = (SAMLObjectBuilder<T>) Configuration.getBuilderFactory().getBuilder(qName);
        if (builder == null) {
            throw new IllegalStateException("No SAMLObjectBuilder registered for class " + objectType.getName());
        }
        return objectType.cast(builder.buildObject());
    }

    /**
     * New attribute value as an {@code xs:string}; non-string values are
     * converted via {@link Object#toString()}.
     *
     * @param value the value
     * @param elementName the element name
     * @return the xS string
     */
    protected final XSString newAttributeValue(final Object value, final QName elementName) {
        final XSStringBuilder attrValueBuilder = new XSStringBuilder();
        final XSString stringValue = attrValueBuilder.buildObject(elementName, XSString.TYPE_NAME);
        if (value instanceof String) {
            stringValue.setValue((String) value);
        } else {
            stringValue.setValue(value.toString());
        }
        return stringValue;
    }

    /**
     * Generate a secure random id suitable for SAML message IDs.
     *
     * @return the secure id string
     * @throws IllegalStateException if the generator cannot be created
     */
    public String generateSecureRandomId() {
        try {
            final SecureRandomIdentifierGenerator idGenerator = new SecureRandomIdentifierGenerator();
            return idGenerator.generateIdentifier();
        } catch (final Exception e) {
            throw new IllegalStateException("Cannot create secure random ID generator for SAML message IDs.", e);
        }
    }

    /**
     * Marshal the saml xml object to raw xml, adding the SAML 2.0 default
     * namespace and the xmlenc namespace to the root element.
     *
     * @param object the object
     * @param writer the writer that receives (and whose contents become) the output
     * @return the xml string
     * @throws IllegalStateException on any marshalling or transformation failure
     */
    public String marshalSamlXmlObject(final XMLObject object, final StringWriter writer) {
        try {
            final MarshallerFactory marshallerFactory = Configuration.getMarshallerFactory();
            final Marshaller marshaller = marshallerFactory.getMarshaller(object);
            final Element element = marshaller.marshall(object);
            element.setAttributeNS("http://www.w3.org/2000/xmlns/", "xmlns", SAMLConstants.SAML20_NS);
            element.setAttributeNS("http://www.w3.org/2000/xmlns/", "xmlns:xenc", "http://www.w3.org/2001/04/xmlenc#");
            final TransformerFactory transFactory = TransformerFactory.newInstance();
            final Transformer transformer = transFactory.newTransformer();
            transformer.setOutputProperty(OutputKeys.OMIT_XML_DECLARATION, "yes");
            transformer.setOutputProperty(OutputKeys.INDENT, "yes");
            transformer.transform(new DOMSource(element), new StreamResult(writer));
            return writer.toString();
        } catch (final Exception e) {
            throw new IllegalStateException("An error has occurred while marshalling SAML object to xml", e);
        }
    }

    /**
     * Sign SAML response.
     *
     * @param samlResponse the SAML response
     * @param privateKey the private key
     * @param publicKey the public key
     * @return the signed response as an XML string
     * @throws RuntimeException if the response cannot be parsed or signed
     */
    public final String signSamlResponse(final String samlResponse,
                                         final PrivateKey privateKey, final PublicKey publicKey) {
        final Document doc = constructDocumentFromXml(samlResponse);
        if (doc != null) {
            final org.jdom.Element signedElement = signSamlElement(doc.getRootElement(),
                    privateKey, publicKey);
            doc.setRootElement((org.jdom.Element) signedElement.detach());
            return new XMLOutputter().outputString(doc);
        }
        throw new RuntimeException("Error signing SAML Response: Null document");
    }

    /**
     * Construct document from xml string. DOCTYPE declarations and external
     * general entities are disabled to mitigate XXE on untrusted input.
     *
     * @param xmlString the xml string
     * @return the document, or {@code null} if parsing fails (callers check for null)
     */
    public static Document constructDocumentFromXml(final String xmlString) {
        try {
            final SAXBuilder builder = new SAXBuilder();
            builder.setFeature("http://xml.org/sax/features/external-general-entities", false);
            builder.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
            return builder
                    .build(new ByteArrayInputStream(xmlString.getBytes(Charset.defaultCharset())));
        } catch (final Exception e) {
            // Deliberate best-effort: a null return signals an unparseable document.
            return null;
        }
    }

    /**
     * Sign SAML element with an enveloped XML signature.
     * NOTE(review): uses SHA-1 digest and DSA/RSA-SHA1 signature methods, which are
     * cryptographically weak; callers relying on this format prevent changing it here.
     *
     * @param element the element
     * @param privKey the priv key
     * @param pubKey the pub key (determines the signature algorithm)
     * @return the signed element
     * @throws RuntimeException on any signing failure or unsupported key type
     */
    private static org.jdom.Element signSamlElement(final org.jdom.Element element, final PrivateKey privKey,
                                                    final PublicKey pubKey) {
        try {
            final String providerName = System.getProperty("jsr105Provider",
                    "org.jcp.xml.dsig.internal.dom.XMLDSigRI");
            final XMLSignatureFactory sigFactory = XMLSignatureFactory
                    .getInstance("DOM", (Provider) Class.forName(providerName)
                            .newInstance());
            final List<Transform> envelopedTransform = Collections
                    .singletonList(sigFactory.newTransform(Transform.ENVELOPED,
                            (TransformParameterSpec) null));
            final Reference ref = sigFactory.newReference("", sigFactory
                    .newDigestMethod(DigestMethod.SHA1, null), envelopedTransform,
                    null, null);
            // Create the SignatureMethod based on the type of key
            SignatureMethod signatureMethod;
            if (pubKey instanceof DSAPublicKey) {
                signatureMethod = sigFactory.newSignatureMethod(
                        SignatureMethod.DSA_SHA1, null);
            } else if (pubKey instanceof RSAPublicKey) {
                signatureMethod = sigFactory.newSignatureMethod(
                        SignatureMethod.RSA_SHA1, null);
            } else {
                throw new RuntimeException("Error signing SAML element: Unsupported type of key");
            }
            final CanonicalizationMethod canonicalizationMethod = sigFactory
                    .newCanonicalizationMethod(
                            CanonicalizationMethod.INCLUSIVE_WITH_COMMENTS,
                            (C14NMethodParameterSpec) null);
            // Create the SignedInfo
            final SignedInfo signedInfo = sigFactory.newSignedInfo(
                    canonicalizationMethod, signatureMethod, Collections
                            .singletonList(ref));
            // Create a KeyValue containing the DSA or RSA PublicKey
            final KeyInfoFactory keyInfoFactory = sigFactory
                    .getKeyInfoFactory();
            final KeyValue keyValuePair = keyInfoFactory.newKeyValue(pubKey);
            // Create a KeyInfo and add the KeyValue to it
            final KeyInfo keyInfo = keyInfoFactory.newKeyInfo(Collections
                    .singletonList(keyValuePair));
            // Convert the JDOM document to w3c (Java XML signature API requires
            // w3c representation)
            final org.w3c.dom.Element w3cElement = toDom(element);
            // Create a DOMSignContext and specify the DSA/RSA PrivateKey and
            // location of the resulting XMLSignature's parent element
            final DOMSignContext dsc = new DOMSignContext(privKey, w3cElement);
            final org.w3c.dom.Node xmlSigInsertionPoint = getXmlSignatureInsertLocation(w3cElement);
            dsc.setNextSibling(xmlSigInsertionPoint);
            // Marshal, generate (and sign) the enveloped signature
            final XMLSignature signature = sigFactory.newXMLSignature(signedInfo,
                    keyInfo);
            signature.sign(dsc);
            return toJdom(w3cElement);
        } catch (final Exception e) {
            throw new RuntimeException("Error signing SAML element: "
                    + e.getMessage(), e);
        }
    }

    /**
     * Gets the xml signature insert location: after the last {@code Extensions}
     * element if present, otherwise after the last {@code Status} element.
     *
     * @param elem the elem
     * @return the xml signature insert location
     */
    private static Node getXmlSignatureInsertLocation(final org.w3c.dom.Element elem) {
        org.w3c.dom.Node insertLocation = null;
        org.w3c.dom.NodeList nodeList = elem.getElementsByTagNameNS(
                SAMLConstants.SAML20P_NS, "Extensions");
        if (nodeList.getLength() != 0) {
            insertLocation = nodeList.item(nodeList.getLength() - 1);
        } else {
            nodeList = elem.getElementsByTagNameNS(SAMLConstants.SAML20P_NS, "Status");
            insertLocation = nodeList.item(nodeList.getLength() - 1);
        }
        return insertLocation;
    }

    /**
     * Convert the received jdom element to a w3c Element by round-tripping its
     * owning document through {@link #toDom(Document)}.
     *
     * @param element the element
     * @return the org.w3c.dom. element
     */
    private static org.w3c.dom.Element toDom(final org.jdom.Element element) {
        return toDom(element.getDocument()).getDocumentElement();
    }

    /**
     * Convert the received jdom doc to a w3c Document by serializing and
     * re-parsing it. Input here is self-generated XML, not untrusted data.
     *
     * @param doc the doc
     * @return the org.w3c.dom. document, or {@code null} if conversion fails
     */
    private static org.w3c.dom.Document toDom(final Document doc) {
        try {
            final XMLOutputter xmlOutputter = new XMLOutputter();
            final StringWriter elemStrWriter = new StringWriter();
            xmlOutputter.output(doc, elemStrWriter);
            final byte[] xmlBytes = elemStrWriter.toString().getBytes(Charset.defaultCharset());
            final DocumentBuilderFactory dbf = DocumentBuilderFactory
                    .newInstance();
            dbf.setNamespaceAware(true);
            return dbf.newDocumentBuilder().parse(
                    new ByteArrayInputStream(xmlBytes));
        } catch (final Exception e) {
            // Deliberate best-effort: a null return propagates to the caller's null check.
            return null;
        }
    }

    /**
     * Convert to a jdom element.
     *
     * @param e the e
     * @return the element
     */
    private static org.jdom.Element toJdom(final org.w3c.dom.Element e) {
        return new DOMBuilder().build(e);
    }
}
| |
package org.openestate.io.immoxml.xml;
import java.io.Serializable;
import javax.annotation.Generated;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
import org.jvnet.jaxb2_commons.lang.CopyStrategy2;
import org.jvnet.jaxb2_commons.lang.CopyTo2;
import org.jvnet.jaxb2_commons.lang.Equals2;
import org.jvnet.jaxb2_commons.lang.EqualsStrategy2;
import org.jvnet.jaxb2_commons.lang.JAXBCopyStrategy;
import org.jvnet.jaxb2_commons.lang.JAXBEqualsStrategy;
import org.jvnet.jaxb2_commons.lang.JAXBToStringStrategy;
import org.jvnet.jaxb2_commons.lang.ToString2;
import org.jvnet.jaxb2_commons.lang.ToStringStrategy2;
import org.jvnet.jaxb2_commons.locator.ObjectLocator;
import org.jvnet.jaxb2_commons.locator.util.LocatorUtils;
/**
 * Java class for the {@code <nutzungsart>} element.
 *
 * <p>Models the permitted usage of a property through four optional boolean
 * XML attributes: {@code WOHNEN}, {@code GEWERBE}, {@code ANLAGE} and
 * {@code WAZ} (presumably residential / commercial / investment usage — confirm
 * against the ImmoXML schema documentation). A {@code null} field means the
 * corresponding attribute was absent from the XML.</p>
 *
 * <p>NOTE(review): originally generated by JAXB; hand edits here (added
 * {@code @Override}, value-based {@code Boolean} comparisons) are lost if the
 * class is regenerated from the schema.</p>
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "")
@XmlRootElement(name = "nutzungsart")
@Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:30:44+02:00", comments = "JAXB RI v2.3.0")
public class Nutzungsart implements Serializable, Cloneable, CopyTo2, Equals2, ToString2
{
    // Tri-state attribute values: TRUE / FALSE / null (= attribute absent).
    @XmlAttribute(name = "WOHNEN")
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:30:44+02:00", comments = "JAXB RI v2.3.0")
    protected Boolean wohnen;
    @XmlAttribute(name = "GEWERBE")
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:30:44+02:00", comments = "JAXB RI v2.3.0")
    protected Boolean gewerbe;
    @XmlAttribute(name = "ANLAGE")
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:30:44+02:00", comments = "JAXB RI v2.3.0")
    protected Boolean anlage;
    @XmlAttribute(name = "WAZ")
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:30:44+02:00", comments = "JAXB RI v2.3.0")
    protected Boolean waz;
    /**
     * Gets the value of the wohnen property.
     *
     * @return possible object is {@link Boolean }, or {@code null} when absent
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:30:44+02:00", comments = "JAXB RI v2.3.0")
    public Boolean getWOHNEN() {
        return wohnen;
    }
    /**
     * Sets the value of the wohnen property.
     *
     * @param value allowed object is {@link Boolean }, or {@code null} to clear
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:30:44+02:00", comments = "JAXB RI v2.3.0")
    public void setWOHNEN(Boolean value) {
        this.wohnen = value;
    }
    /**
     * Gets the value of the gewerbe property.
     *
     * @return possible object is {@link Boolean }, or {@code null} when absent
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:30:44+02:00", comments = "JAXB RI v2.3.0")
    public Boolean getGEWERBE() {
        return gewerbe;
    }
    /**
     * Sets the value of the gewerbe property.
     *
     * @param value allowed object is {@link Boolean }, or {@code null} to clear
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:30:44+02:00", comments = "JAXB RI v2.3.0")
    public void setGEWERBE(Boolean value) {
        this.gewerbe = value;
    }
    /**
     * Gets the value of the anlage property.
     *
     * @return possible object is {@link Boolean }, or {@code null} when absent
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:30:44+02:00", comments = "JAXB RI v2.3.0")
    public Boolean getANLAGE() {
        return anlage;
    }
    /**
     * Sets the value of the anlage property.
     *
     * @param value allowed object is {@link Boolean }, or {@code null} to clear
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:30:44+02:00", comments = "JAXB RI v2.3.0")
    public void setANLAGE(Boolean value) {
        this.anlage = value;
    }
    /**
     * Gets the value of the waz property.
     *
     * @return possible object is {@link Boolean }, or {@code null} when absent
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:30:44+02:00", comments = "JAXB RI v2.3.0")
    public Boolean getWAZ() {
        return waz;
    }
    /**
     * Sets the value of the waz property.
     *
     * @param value allowed object is {@link Boolean }, or {@code null} to clear
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:30:44+02:00", comments = "JAXB RI v2.3.0")
    public void setWAZ(Boolean value) {
        this.waz = value;
    }
    /** Renders this object via the shared JAXB toString strategy. */
    @Override
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:30:44+02:00", comments = "JAXB RI v2.3.0")
    public String toString() {
        final ToStringStrategy2 strategy = JAXBToStringStrategy.INSTANCE2;
        final StringBuilder buffer = new StringBuilder();
        append(null, buffer, strategy);
        return buffer.toString();
    }
    /** Appends this object's representation to {@code buffer} (ToString2 contract). */
    @Override
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:30:44+02:00", comments = "JAXB RI v2.3.0")
    public StringBuilder append(ObjectLocator locator, StringBuilder buffer, ToStringStrategy2 strategy) {
        strategy.appendStart(locator, this, buffer);
        appendFields(locator, buffer, strategy);
        strategy.appendEnd(locator, this, buffer);
        return buffer;
    }
    /** Appends every field; the final flag tells the strategy whether the attribute is set. */
    @Override
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:30:44+02:00", comments = "JAXB RI v2.3.0")
    public StringBuilder appendFields(ObjectLocator locator, StringBuilder buffer, ToStringStrategy2 strategy) {
        strategy.appendField(locator, this, "wohnen", buffer, this.getWOHNEN(), (this.wohnen!= null));
        strategy.appendField(locator, this, "gewerbe", buffer, this.getGEWERBE(), (this.gewerbe!= null));
        strategy.appendField(locator, this, "anlage", buffer, this.getANLAGE(), (this.anlage!= null));
        strategy.appendField(locator, this, "waz", buffer, this.getWAZ(), (this.waz!= null));
        return buffer;
    }
    /** Clones by copying onto a freshly created instance. */
    @Override
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:30:44+02:00", comments = "JAXB RI v2.3.0")
    public Object clone() {
        return copyTo(createNewInstance());
    }
    /** Copies this object onto {@code target} using the default JAXB copy strategy. */
    @Override
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:30:44+02:00", comments = "JAXB RI v2.3.0")
    public Object copyTo(Object target) {
        final CopyStrategy2 strategy = JAXBCopyStrategy.INSTANCE2;
        return copyTo(null, target, strategy);
    }
    /**
     * Copies every attribute onto {@code target} (a new instance when null),
     * honouring the strategy's tri-state decision per field:
     * TRUE = copy, FALSE = clear, null = leave untouched.
     */
    @Override
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:30:44+02:00", comments = "JAXB RI v2.3.0")
    public Object copyTo(ObjectLocator locator, Object target, CopyStrategy2 strategy) {
        final Object draftCopy = ((target == null)?createNewInstance():target);
        if (draftCopy instanceof Nutzungsart) {
            final Nutzungsart copy = ((Nutzungsart) draftCopy);
            {
                // Boolean.TRUE.equals(..) instead of the generated "== Boolean.TRUE":
                // identical for the canonical constants the strategy returns, but
                // robust against non-canonical Boolean instances.
                Boolean wohnenShouldBeCopiedAndSet = strategy.shouldBeCopiedAndSet(locator, (this.wohnen!= null));
                if (Boolean.TRUE.equals(wohnenShouldBeCopiedAndSet)) {
                    Boolean sourceWOHNEN;
                    sourceWOHNEN = this.getWOHNEN();
                    Boolean copyWOHNEN = ((Boolean) strategy.copy(LocatorUtils.property(locator, "wohnen", sourceWOHNEN), sourceWOHNEN, (this.wohnen!= null)));
                    copy.setWOHNEN(copyWOHNEN);
                } else if (Boolean.FALSE.equals(wohnenShouldBeCopiedAndSet)) {
                    copy.wohnen = null;
                }
            }
            {
                Boolean gewerbeShouldBeCopiedAndSet = strategy.shouldBeCopiedAndSet(locator, (this.gewerbe!= null));
                if (Boolean.TRUE.equals(gewerbeShouldBeCopiedAndSet)) {
                    Boolean sourceGEWERBE;
                    sourceGEWERBE = this.getGEWERBE();
                    Boolean copyGEWERBE = ((Boolean) strategy.copy(LocatorUtils.property(locator, "gewerbe", sourceGEWERBE), sourceGEWERBE, (this.gewerbe!= null)));
                    copy.setGEWERBE(copyGEWERBE);
                } else if (Boolean.FALSE.equals(gewerbeShouldBeCopiedAndSet)) {
                    copy.gewerbe = null;
                }
            }
            {
                Boolean anlageShouldBeCopiedAndSet = strategy.shouldBeCopiedAndSet(locator, (this.anlage!= null));
                if (Boolean.TRUE.equals(anlageShouldBeCopiedAndSet)) {
                    Boolean sourceANLAGE;
                    sourceANLAGE = this.getANLAGE();
                    Boolean copyANLAGE = ((Boolean) strategy.copy(LocatorUtils.property(locator, "anlage", sourceANLAGE), sourceANLAGE, (this.anlage!= null)));
                    copy.setANLAGE(copyANLAGE);
                } else if (Boolean.FALSE.equals(anlageShouldBeCopiedAndSet)) {
                    copy.anlage = null;
                }
            }
            {
                Boolean wazShouldBeCopiedAndSet = strategy.shouldBeCopiedAndSet(locator, (this.waz!= null));
                if (Boolean.TRUE.equals(wazShouldBeCopiedAndSet)) {
                    Boolean sourceWAZ;
                    sourceWAZ = this.getWAZ();
                    Boolean copyWAZ = ((Boolean) strategy.copy(LocatorUtils.property(locator, "waz", sourceWAZ), sourceWAZ, (this.waz!= null)));
                    copy.setWAZ(copyWAZ);
                } else if (Boolean.FALSE.equals(wazShouldBeCopiedAndSet)) {
                    copy.waz = null;
                }
            }
        }
        return draftCopy;
    }
    /** Creates an empty instance for {@link #clone()} / {@link #copyTo(Object)}. */
    @Override
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:30:44+02:00", comments = "JAXB RI v2.3.0")
    public Object createNewInstance() {
        return new Nutzungsart();
    }
    /** Strategy-driven equality over all four attributes (Equals2 contract). */
    @Override
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:30:44+02:00", comments = "JAXB RI v2.3.0")
    public boolean equals(ObjectLocator thisLocator, ObjectLocator thatLocator, Object object, EqualsStrategy2 strategy) {
        if ((object == null)||(this.getClass()!= object.getClass())) {
            return false;
        }
        if (this == object) {
            return true;
        }
        final Nutzungsart that = ((Nutzungsart) object);
        {
            Boolean lhsWOHNEN;
            lhsWOHNEN = this.getWOHNEN();
            Boolean rhsWOHNEN;
            rhsWOHNEN = that.getWOHNEN();
            if (!strategy.equals(LocatorUtils.property(thisLocator, "wohnen", lhsWOHNEN), LocatorUtils.property(thatLocator, "wohnen", rhsWOHNEN), lhsWOHNEN, rhsWOHNEN, (this.wohnen!= null), (that.wohnen!= null))) {
                return false;
            }
        }
        {
            Boolean lhsGEWERBE;
            lhsGEWERBE = this.getGEWERBE();
            Boolean rhsGEWERBE;
            rhsGEWERBE = that.getGEWERBE();
            if (!strategy.equals(LocatorUtils.property(thisLocator, "gewerbe", lhsGEWERBE), LocatorUtils.property(thatLocator, "gewerbe", rhsGEWERBE), lhsGEWERBE, rhsGEWERBE, (this.gewerbe!= null), (that.gewerbe!= null))) {
                return false;
            }
        }
        {
            Boolean lhsANLAGE;
            lhsANLAGE = this.getANLAGE();
            Boolean rhsANLAGE;
            rhsANLAGE = that.getANLAGE();
            if (!strategy.equals(LocatorUtils.property(thisLocator, "anlage", lhsANLAGE), LocatorUtils.property(thatLocator, "anlage", rhsANLAGE), lhsANLAGE, rhsANLAGE, (this.anlage!= null), (that.anlage!= null))) {
                return false;
            }
        }
        {
            Boolean lhsWAZ;
            lhsWAZ = this.getWAZ();
            Boolean rhsWAZ;
            rhsWAZ = that.getWAZ();
            if (!strategy.equals(LocatorUtils.property(thisLocator, "waz", lhsWAZ), LocatorUtils.property(thatLocator, "waz", rhsWAZ), lhsWAZ, rhsWAZ, (this.waz!= null), (that.waz!= null))) {
                return false;
            }
        }
        return true;
    }
    /** Object equality delegating to the strategy-based {@code equals}. */
    @Override
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:30:44+02:00", comments = "JAXB RI v2.3.0")
    public boolean equals(Object object) {
        final EqualsStrategy2 strategy = JAXBEqualsStrategy.INSTANCE2;
        return equals(null, null, object, strategy);
    }
}
| |
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vJAXB 2.1.3 in JDK 1.6
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2009.05.24 at 08:50:20 PM NZST
//
package com.rabidgremlin.fddreport.bindings;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlID;
import javax.xml.bind.annotation.XmlIDREF;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.adapters.CollapsedStringAdapter;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
import javax.xml.datatype.XMLGregorianCalendar;
/**
* <p>Java class for anonymous complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="FeatureSets">
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence maxOccurs="unbounded">
* <element name="FeatureSet">
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <attribute name="FeatureSetId" use="required" type="{http://www.w3.org/2001/XMLSchema}ID" />
* <attribute name="Name" use="required" type="{http://www.w3.org/2001/XMLSchema}string" />
* </restriction>
* </complexContent>
* </complexType>
* </element>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </element>
* <element name="Features">
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence maxOccurs="unbounded">
* <element name="Feature">
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <attribute name="Name" use="required" type="{http://www.w3.org/2001/XMLSchema}string" />
* <attribute name="FeatureSet" use="required" type="{http://www.w3.org/2001/XMLSchema}IDREF" />
* <attribute name="DomainWalkthroughPlanned" type="{http://www.w3.org/2001/XMLSchema}date" />
* <attribute name="DomainWalkthroughActual" type="{http://www.w3.org/2001/XMLSchema}date" />
* <attribute name="DesignPlanned" type="{http://www.w3.org/2001/XMLSchema}date" />
* <attribute name="DesignActual" type="{http://www.w3.org/2001/XMLSchema}date" />
* <attribute name="DesignReviewPlanned" type="{http://www.w3.org/2001/XMLSchema}date" />
* <attribute name="DesignReviewActual" type="{http://www.w3.org/2001/XMLSchema}date" />
* <attribute name="CodePlanned" type="{http://www.w3.org/2001/XMLSchema}date" />
* <attribute name="CodeActual" type="{http://www.w3.org/2001/XMLSchema}date" />
* <attribute name="CodeReviewPlanned" type="{http://www.w3.org/2001/XMLSchema}date" />
* <attribute name="CodeReviewActual" type="{http://www.w3.org/2001/XMLSchema}date" />
* <attribute name="PromoteToBuildPlanned" use="required" type="{http://www.w3.org/2001/XMLSchema}date" />
* <attribute name="PromoteToBuildActual" type="{http://www.w3.org/2001/XMLSchema}date" />
* </restriction>
* </complexContent>
* </complexType>
* </element>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </element>
* </sequence>
* <attribute name="ProjectName" use="required" type="{http://www.w3.org/2001/XMLSchema}string" />
* <attribute name="StartDate" use="required" type="{http://www.w3.org/2001/XMLSchema}date" />
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "featureSets",
    "features"
})
@XmlRootElement(name = "Project")
public class Project {
    // NOTE(review): JAXB-generated binding for the <Project> root element of an
    // FDD-report schema; any manual modification is lost when the class is
    // regenerated. FeatureSet.featureSetId (@XmlID) is the target of
    // Feature.featureSet (@XmlIDREF) — JAXB resolves the reference at unmarshal
    // time.
    @XmlElement(name = "FeatureSets", required = true)
    protected Project.FeatureSets featureSets;
    @XmlElement(name = "Features", required = true)
    protected Project.Features features;
    @XmlAttribute(name = "ProjectName", required = true)
    protected String projectName;
    @XmlAttribute(name = "StartDate", required = true)
    @XmlSchemaType(name = "date")
    protected XMLGregorianCalendar startDate;
    /**
     * Gets the value of the featureSets property.
     *
     * @return
     *     possible object is
     *     {@link Project.FeatureSets }
     *
     */
    public Project.FeatureSets getFeatureSets() {
        return featureSets;
    }
    /**
     * Sets the value of the featureSets property.
     *
     * @param value
     *     allowed object is
     *     {@link Project.FeatureSets }
     *
     */
    public void setFeatureSets(Project.FeatureSets value) {
        this.featureSets = value;
    }
    /**
     * Gets the value of the features property.
     *
     * @return
     *     possible object is
     *     {@link Project.Features }
     *
     */
    public Project.Features getFeatures() {
        return features;
    }
    /**
     * Sets the value of the features property.
     *
     * @param value
     *     allowed object is
     *     {@link Project.Features }
     *
     */
    public void setFeatures(Project.Features value) {
        this.features = value;
    }
    /**
     * Gets the value of the projectName property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getProjectName() {
        return projectName;
    }
    /**
     * Sets the value of the projectName property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setProjectName(String value) {
        this.projectName = value;
    }
    /**
     * Gets the value of the startDate property.
     *
     * @return
     *     possible object is
     *     {@link XMLGregorianCalendar }
     *
     */
    public XMLGregorianCalendar getStartDate() {
        return startDate;
    }
    /**
     * Sets the value of the startDate property.
     *
     * @param value
     *     allowed object is
     *     {@link XMLGregorianCalendar }
     *
     */
    public void setStartDate(XMLGregorianCalendar value) {
        this.startDate = value;
    }
    /**
     * <p>Java class for anonymous complex type.
     *
     * <p>The following schema fragment specifies the expected content contained within this class.
     *
     * <pre>
     * &lt;complexType&gt;
     *   &lt;complexContent&gt;
     *     &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"&gt;
     *       &lt;sequence maxOccurs="unbounded"&gt;
     *         &lt;element name="FeatureSet"&gt;
     *           &lt;complexType&gt;
     *             &lt;complexContent&gt;
     *               &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"&gt;
     *                 &lt;attribute name="FeatureSetId" use="required" type="{http://www.w3.org/2001/XMLSchema}ID" /&gt;
     *                 &lt;attribute name="Name" use="required" type="{http://www.w3.org/2001/XMLSchema}string" /&gt;
     *               &lt;/restriction&gt;
     *             &lt;/complexContent&gt;
     *           &lt;/complexType&gt;
     *         &lt;/element&gt;
     *       &lt;/sequence&gt;
     *     &lt;/restriction&gt;
     *   &lt;/complexContent&gt;
     * &lt;/complexType&gt;
     * </pre>
     *
     *
     */
    @XmlAccessorType(XmlAccessType.FIELD)
    @XmlType(name = "", propOrder = {
        "featureSet"
    })
    public static class FeatureSets {
        @XmlElement(name = "FeatureSet", required = true)
        protected List<Project.FeatureSets.FeatureSet> featureSet;
        /**
         * Gets the value of the featureSet property.
         *
         * <p>
         * This accessor method returns a reference to the live list,
         * not a snapshot. Therefore any modification you make to the
         * returned list will be present inside the JAXB object.
         * This is why there is not a <CODE>set</CODE> method for the featureSet property.
         *
         * <p>
         * For example, to add a new item, do as follows:
         * <pre>
         *    getFeatureSet().add(newItem);
         * </pre>
         *
         *
         * <p>
         * Objects of the following type(s) are allowed in the list
         * {@link Project.FeatureSets.FeatureSet }
         *
         *
         */
        public List<Project.FeatureSets.FeatureSet> getFeatureSet() {
            // Lazily initialized live list (standard JAXB pattern); never null.
            if (featureSet == null) {
                featureSet = new ArrayList<Project.FeatureSets.FeatureSet>();
            }
            return this.featureSet;
        }
        /**
         * <p>Java class for anonymous complex type.
         *
         * <p>The following schema fragment specifies the expected content contained within this class.
         *
         * <pre>
         * &lt;complexType&gt;
         *   &lt;complexContent&gt;
         *     &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"&gt;
         *       &lt;attribute name="FeatureSetId" use="required" type="{http://www.w3.org/2001/XMLSchema}ID" /&gt;
         *       &lt;attribute name="Name" use="required" type="{http://www.w3.org/2001/XMLSchema}string" /&gt;
         *     &lt;/restriction&gt;
         *   &lt;/complexContent&gt;
         * &lt;/complexType&gt;
         * </pre>
         *
         *
         */
        @XmlAccessorType(XmlAccessType.FIELD)
        @XmlType(name = "")
        public static class FeatureSet {
            // Document-unique XML ID; Feature.featureSet IDREFs resolve to this.
            @XmlAttribute(name = "FeatureSetId", required = true)
            @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
            @XmlID
            @XmlSchemaType(name = "ID")
            protected String featureSetId;
            @XmlAttribute(name = "Name", required = true)
            protected String name;
            /**
             * Gets the value of the featureSetId property.
             *
             * @return
             *     possible object is
             *     {@link String }
             *
             */
            public String getFeatureSetId() {
                return featureSetId;
            }
            /**
             * Sets the value of the featureSetId property.
             *
             * @param value
             *     allowed object is
             *     {@link String }
             *
             */
            public void setFeatureSetId(String value) {
                this.featureSetId = value;
            }
            /**
             * Gets the value of the name property.
             *
             * @return
             *     possible object is
             *     {@link String }
             *
             */
            public String getName() {
                return name;
            }
            /**
             * Sets the value of the name property.
             *
             * @param value
             *     allowed object is
             *     {@link String }
             *
             */
            public void setName(String value) {
                this.name = value;
            }
        }
    }
    /**
     * <p>Java class for anonymous complex type.
     *
     * <p>The following schema fragment specifies the expected content contained within this class.
     *
     * <pre>
     * &lt;complexType&gt;
     *   &lt;complexContent&gt;
     *     &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"&gt;
     *       &lt;sequence maxOccurs="unbounded"&gt;
     *         &lt;element name="Feature"&gt;
     *           &lt;complexType&gt;
     *             &lt;complexContent&gt;
     *               &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"&gt;
     *                 &lt;attribute name="Name" use="required" type="{http://www.w3.org/2001/XMLSchema}string" /&gt;
     *                 &lt;attribute name="FeatureSet" use="required" type="{http://www.w3.org/2001/XMLSchema}IDREF" /&gt;
     *                 &lt;attribute name="DomainWalkthroughPlanned" type="{http://www.w3.org/2001/XMLSchema}date" /&gt;
     *                 &lt;attribute name="DomainWalkthroughActual" type="{http://www.w3.org/2001/XMLSchema}date" /&gt;
     *                 &lt;attribute name="DesignPlanned" type="{http://www.w3.org/2001/XMLSchema}date" /&gt;
     *                 &lt;attribute name="DesignActual" type="{http://www.w3.org/2001/XMLSchema}date" /&gt;
     *                 &lt;attribute name="DesignReviewPlanned" type="{http://www.w3.org/2001/XMLSchema}date" /&gt;
     *                 &lt;attribute name="DesignReviewActual" type="{http://www.w3.org/2001/XMLSchema}date" /&gt;
     *                 &lt;attribute name="CodePlanned" type="{http://www.w3.org/2001/XMLSchema}date" /&gt;
     *                 &lt;attribute name="CodeActual" type="{http://www.w3.org/2001/XMLSchema}date" /&gt;
     *                 &lt;attribute name="CodeReviewPlanned" type="{http://www.w3.org/2001/XMLSchema}date" /&gt;
     *                 &lt;attribute name="CodeReviewActual" type="{http://www.w3.org/2001/XMLSchema}date" /&gt;
     *                 &lt;attribute name="PromoteToBuildPlanned" use="required" type="{http://www.w3.org/2001/XMLSchema}date" /&gt;
     *                 &lt;attribute name="PromoteToBuildActual" type="{http://www.w3.org/2001/XMLSchema}date" /&gt;
     *               &lt;/restriction&gt;
     *             &lt;/complexContent&gt;
     *           &lt;/complexType&gt;
     *         &lt;/element&gt;
     *       &lt;/sequence&gt;
     *     &lt;/restriction&gt;
     *   &lt;/complexContent&gt;
     * &lt;/complexType&gt;
     * </pre>
     *
     *
     */
    @XmlAccessorType(XmlAccessType.FIELD)
    @XmlType(name = "", propOrder = {
        "feature"
    })
    public static class Features {
        @XmlElement(name = "Feature", required = true)
        protected List<Project.Features.Feature> feature;
        /**
         * Gets the value of the feature property.
         *
         * <p>
         * This accessor method returns a reference to the live list,
         * not a snapshot. Therefore any modification you make to the
         * returned list will be present inside the JAXB object.
         * This is why there is not a <CODE>set</CODE> method for the feature property.
         *
         * <p>
         * For example, to add a new item, do as follows:
         * <pre>
         *    getFeature().add(newItem);
         * </pre>
         *
         *
         * <p>
         * Objects of the following type(s) are allowed in the list
         * {@link Project.Features.Feature }
         *
         *
         */
        public List<Project.Features.Feature> getFeature() {
            // Lazily initialized live list (standard JAXB pattern); never null.
            if (feature == null) {
                feature = new ArrayList<Project.Features.Feature>();
            }
            return this.feature;
        }
        /**
         * <p>Java class for anonymous complex type.
         *
         * <p>The following schema fragment specifies the expected content contained within this class.
         *
         * <pre>
         * &lt;complexType&gt;
         *   &lt;complexContent&gt;
         *     &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"&gt;
         *       &lt;attribute name="Name" use="required" type="{http://www.w3.org/2001/XMLSchema}string" /&gt;
         *       &lt;attribute name="FeatureSet" use="required" type="{http://www.w3.org/2001/XMLSchema}IDREF" /&gt;
         *       &lt;attribute name="DomainWalkthroughPlanned" type="{http://www.w3.org/2001/XMLSchema}date" /&gt;
         *       &lt;attribute name="DomainWalkthroughActual" type="{http://www.w3.org/2001/XMLSchema}date" /&gt;
         *       &lt;attribute name="DesignPlanned" type="{http://www.w3.org/2001/XMLSchema}date" /&gt;
         *       &lt;attribute name="DesignActual" type="{http://www.w3.org/2001/XMLSchema}date" /&gt;
         *       &lt;attribute name="DesignReviewPlanned" type="{http://www.w3.org/2001/XMLSchema}date" /&gt;
         *       &lt;attribute name="DesignReviewActual" type="{http://www.w3.org/2001/XMLSchema}date" /&gt;
         *       &lt;attribute name="CodePlanned" type="{http://www.w3.org/2001/XMLSchema}date" /&gt;
         *       &lt;attribute name="CodeActual" type="{http://www.w3.org/2001/XMLSchema}date" /&gt;
         *       &lt;attribute name="CodeReviewPlanned" type="{http://www.w3.org/2001/XMLSchema}date" /&gt;
         *       &lt;attribute name="CodeReviewActual" type="{http://www.w3.org/2001/XMLSchema}date" /&gt;
         *       &lt;attribute name="PromoteToBuildPlanned" use="required" type="{http://www.w3.org/2001/XMLSchema}date" /&gt;
         *       &lt;attribute name="PromoteToBuildActual" type="{http://www.w3.org/2001/XMLSchema}date" /&gt;
         *     &lt;/restriction&gt;
         *   &lt;/complexContent&gt;
         * &lt;/complexType&gt;
         * </pre>
         *
         *
         */
        @XmlAccessorType(XmlAccessType.FIELD)
        @XmlType(name = "")
        public static class Feature {
            @XmlAttribute(name = "Name", required = true)
            protected String name;
            // IDREF: unmarshals to the FeatureSet object whose FeatureSetId matches.
            @XmlAttribute(name = "FeatureSet", required = true)
            @XmlIDREF
            @XmlSchemaType(name = "IDREF")
            protected Object featureSet;
            @XmlAttribute(name = "DomainWalkthroughPlanned")
            @XmlSchemaType(name = "date")
            protected XMLGregorianCalendar domainWalkthroughPlanned;
            @XmlAttribute(name = "DomainWalkthroughActual")
            @XmlSchemaType(name = "date")
            protected XMLGregorianCalendar domainWalkthroughActual;
            @XmlAttribute(name = "DesignPlanned")
            @XmlSchemaType(name = "date")
            protected XMLGregorianCalendar designPlanned;
            @XmlAttribute(name = "DesignActual")
            @XmlSchemaType(name = "date")
            protected XMLGregorianCalendar designActual;
            @XmlAttribute(name = "DesignReviewPlanned")
            @XmlSchemaType(name = "date")
            protected XMLGregorianCalendar designReviewPlanned;
            @XmlAttribute(name = "DesignReviewActual")
            @XmlSchemaType(name = "date")
            protected XMLGregorianCalendar designReviewActual;
            @XmlAttribute(name = "CodePlanned")
            @XmlSchemaType(name = "date")
            protected XMLGregorianCalendar codePlanned;
            @XmlAttribute(name = "CodeActual")
            @XmlSchemaType(name = "date")
            protected XMLGregorianCalendar codeActual;
            @XmlAttribute(name = "CodeReviewPlanned")
            @XmlSchemaType(name = "date")
            protected XMLGregorianCalendar codeReviewPlanned;
            @XmlAttribute(name = "CodeReviewActual")
            @XmlSchemaType(name = "date")
            protected XMLGregorianCalendar codeReviewActual;
            @XmlAttribute(name = "PromoteToBuildPlanned", required = true)
            @XmlSchemaType(name = "date")
            protected XMLGregorianCalendar promoteToBuildPlanned;
            @XmlAttribute(name = "PromoteToBuildActual")
            @XmlSchemaType(name = "date")
            protected XMLGregorianCalendar promoteToBuildActual;
            /**
             * Gets the value of the name property.
             *
             * @return
             *     possible object is
             *     {@link String }
             *
             */
            public String getName() {
                return name;
            }
            /**
             * Sets the value of the name property.
             *
             * @param value
             *     allowed object is
             *     {@link String }
             *
             */
            public void setName(String value) {
                this.name = value;
            }
            /**
             * Gets the value of the featureSet property.
             *
             * @return
             *     possible object is
             *     {@link Object }
             *
             */
            public Object getFeatureSet() {
                return featureSet;
            }
            /**
             * Sets the value of the featureSet property.
             *
             * @param value
             *     allowed object is
             *     {@link Object }
             *
             */
            public void setFeatureSet(Object value) {
                this.featureSet = value;
            }
            /**
             * Gets the value of the domainWalkthroughPlanned property.
             *
             * @return
             *     possible object is
             *     {@link XMLGregorianCalendar }
             *
             */
            public XMLGregorianCalendar getDomainWalkthroughPlanned() {
                return domainWalkthroughPlanned;
            }
            /**
             * Sets the value of the domainWalkthroughPlanned property.
             *
             * @param value
             *     allowed object is
             *     {@link XMLGregorianCalendar }
             *
             */
            public void setDomainWalkthroughPlanned(XMLGregorianCalendar value) {
                this.domainWalkthroughPlanned = value;
            }
            /**
             * Gets the value of the domainWalkthroughActual property.
             *
             * @return
             *     possible object is
             *     {@link XMLGregorianCalendar }
             *
             */
            public XMLGregorianCalendar getDomainWalkthroughActual() {
                return domainWalkthroughActual;
            }
            /**
             * Sets the value of the domainWalkthroughActual property.
             *
             * @param value
             *     allowed object is
             *     {@link XMLGregorianCalendar }
             *
             */
            public void setDomainWalkthroughActual(XMLGregorianCalendar value) {
                this.domainWalkthroughActual = value;
            }
            /**
             * Gets the value of the designPlanned property.
             *
             * @return
             *     possible object is
             *     {@link XMLGregorianCalendar }
             *
             */
            public XMLGregorianCalendar getDesignPlanned() {
                return designPlanned;
            }
            /**
             * Sets the value of the designPlanned property.
             *
             * @param value
             *     allowed object is
             *     {@link XMLGregorianCalendar }
             *
             */
            public void setDesignPlanned(XMLGregorianCalendar value) {
                this.designPlanned = value;
            }
            /**
             * Gets the value of the designActual property.
             *
             * @return
             *     possible object is
             *     {@link XMLGregorianCalendar }
             *
             */
            public XMLGregorianCalendar getDesignActual() {
                return designActual;
            }
            /**
             * Sets the value of the designActual property.
             *
             * @param value
             *     allowed object is
             *     {@link XMLGregorianCalendar }
             *
             */
            public void setDesignActual(XMLGregorianCalendar value) {
                this.designActual = value;
            }
            /**
             * Gets the value of the designReviewPlanned property.
             *
             * @return
             *     possible object is
             *     {@link XMLGregorianCalendar }
             *
             */
            public XMLGregorianCalendar getDesignReviewPlanned() {
                return designReviewPlanned;
            }
            /**
             * Sets the value of the designReviewPlanned property.
             *
             * @param value
             *     allowed object is
             *     {@link XMLGregorianCalendar }
             *
             */
            public void setDesignReviewPlanned(XMLGregorianCalendar value) {
                this.designReviewPlanned = value;
            }
            /**
             * Gets the value of the designReviewActual property.
             *
             * @return
             *     possible object is
             *     {@link XMLGregorianCalendar }
             *
             */
            public XMLGregorianCalendar getDesignReviewActual() {
                return designReviewActual;
            }
            /**
             * Sets the value of the designReviewActual property.
             *
             * @param value
             *     allowed object is
             *     {@link XMLGregorianCalendar }
             *
             */
            public void setDesignReviewActual(XMLGregorianCalendar value) {
                this.designReviewActual = value;
            }
            /**
             * Gets the value of the codePlanned property.
             *
             * @return
             *     possible object is
             *     {@link XMLGregorianCalendar }
             *
             */
            public XMLGregorianCalendar getCodePlanned() {
                return codePlanned;
            }
            /**
             * Sets the value of the codePlanned property.
             *
             * @param value
             *     allowed object is
             *     {@link XMLGregorianCalendar }
             *
             */
            public void setCodePlanned(XMLGregorianCalendar value) {
                this.codePlanned = value;
            }
            /**
             * Gets the value of the codeActual property.
             *
             * @return
             *     possible object is
             *     {@link XMLGregorianCalendar }
             *
             */
            public XMLGregorianCalendar getCodeActual() {
                return codeActual;
            }
            /**
             * Sets the value of the codeActual property.
             *
             * @param value
             *     allowed object is
             *     {@link XMLGregorianCalendar }
             *
             */
            public void setCodeActual(XMLGregorianCalendar value) {
                this.codeActual = value;
            }
            /**
             * Gets the value of the codeReviewPlanned property.
             *
             * @return
             *     possible object is
             *     {@link XMLGregorianCalendar }
             *
             */
            public XMLGregorianCalendar getCodeReviewPlanned() {
                return codeReviewPlanned;
            }
            /**
             * Sets the value of the codeReviewPlanned property.
             *
             * @param value
             *     allowed object is
             *     {@link XMLGregorianCalendar }
             *
             */
            public void setCodeReviewPlanned(XMLGregorianCalendar value) {
                this.codeReviewPlanned = value;
            }
            /**
             * Gets the value of the codeReviewActual property.
             *
             * @return
             *     possible object is
             *     {@link XMLGregorianCalendar }
             *
             */
            public XMLGregorianCalendar getCodeReviewActual() {
                return codeReviewActual;
            }
            /**
             * Sets the value of the codeReviewActual property.
             *
             * @param value
             *     allowed object is
             *     {@link XMLGregorianCalendar }
             *
             */
            public void setCodeReviewActual(XMLGregorianCalendar value) {
                this.codeReviewActual = value;
            }
            /**
             * Gets the value of the promoteToBuildPlanned property.
             *
             * @return
             *     possible object is
             *     {@link XMLGregorianCalendar }
             *
             */
            public XMLGregorianCalendar getPromoteToBuildPlanned() {
                return promoteToBuildPlanned;
            }
            /**
             * Sets the value of the promoteToBuildPlanned property.
             *
             * @param value
             *     allowed object is
             *     {@link XMLGregorianCalendar }
             *
             */
            public void setPromoteToBuildPlanned(XMLGregorianCalendar value) {
                this.promoteToBuildPlanned = value;
            }
            /**
             * Gets the value of the promoteToBuildActual property.
             *
             * @return
             *     possible object is
             *     {@link XMLGregorianCalendar }
             *
             */
            public XMLGregorianCalendar getPromoteToBuildActual() {
                return promoteToBuildActual;
            }
            /**
             * Sets the value of the promoteToBuildActual property.
             *
             * @param value
             *     allowed object is
             *     {@link XMLGregorianCalendar }
             *
             */
            public void setPromoteToBuildActual(XMLGregorianCalendar value) {
                this.promoteToBuildActual = value;
            }
        }
    }
}
| |
/*
* Copyright 2016 Sam Sun <me@samczsun.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.javadeobfuscator.deobfuscator;
import com.javadeobfuscator.deobfuscator.asm.*;
import com.javadeobfuscator.deobfuscator.config.*;
import com.javadeobfuscator.deobfuscator.exceptions.*;
import com.javadeobfuscator.deobfuscator.rules.*;
import com.javadeobfuscator.deobfuscator.transformers.*;
import com.javadeobfuscator.deobfuscator.utils.*;
import org.apache.commons.io.*;
import org.objectweb.asm.*;
import org.objectweb.asm.commons.*;
import org.objectweb.asm.tree.*;
import org.objectweb.asm.util.*;
import org.slf4j.*;
import java.io.*;
import java.lang.reflect.*;
import java.util.AbstractMap.*;
import java.util.*;
import java.util.Map.*;
import java.util.regex.*;
import java.util.zip.*;
/**
 * Core engine of the deobfuscator: loads the classpath, libraries and the input jar,
 * runs the configured {@link Transformer}s over the input classes, and writes the
 * (optionally verified) result back out as a jar.
 */
public class Deobfuscator {
    // Everything resolvable by internal name: path entries, libraries, and (after loadInput) the input classes.
    private final Map<String, ClassNode> classpath = new HashMap<>();
    private final Map<String, ClassNode> libraries = new HashMap<>();
    // Classes from the input jar; these are transformed and written to the output.
    private final Map<String, ClassNode> classes = new HashMap<>();
    // Lazily-populated inheritance graph, keyed by internal class name.
    private final Map<String, ClassTree> hierachy = new HashMap<>();
    private final Set<ClassNode> libraryClassnodes = new HashSet<>();
    public Map<String, byte[]> getInputPassthrough() {
        return inputPassthrough;
    }
    // Entries from the input jar that will be passed through to the output
    private final Map<String, byte[]> inputPassthrough = new HashMap<>();
    // Constant pool data since ClassNodes don't support custom data
    private final Map<ClassNode, ConstantPool> constantPools = new HashMap<>();
    private final Map<ClassNode, ClassReader> readers = new HashMap<>();
    private final Configuration configuration;
    private final Logger logger = LoggerFactory.getLogger(Deobfuscator.class);
    public Deobfuscator(Configuration configuration) {
        this.configuration = configuration;
    }
    // When true, every transformed class is dumped before writing (debugging aid).
    private static final boolean DEBUG = false;
    /**
     * Some obfuscators like to have junk classes. If ALL your libraries are added,
     * enable this to dump troublesome classes. Note that this will not get rid of all junk classes.
     */
    private static final boolean DELETE_USELESS_CLASSES = false;
    public ConstantPool getConstantPool(ClassNode classNode) {
        return this.constantPools.get(classNode);
    }
    public void setConstantPool(ClassNode owner, ConstantPool pool) {
        this.constantPools.put(owner, pool);
    }
    public Map<ClassNode, ConstantPool> getConstantPools() {
        return this.constantPools;
    }
    /**
     * Reads every {@code .class} entry of a jar/zip into a name-keyed map.
     *
     * @param file     the jar/zip to read
     * @param skipCode when {@code true}, method bodies are skipped (enough for type resolution)
     * @return internal name to {@link ClassNode}
     * @throws IOException if the archive cannot be read
     */
    private Map<String, ClassNode> loadClasspathFile(File file, boolean skipCode) throws IOException {
        Map<String, ClassNode> map = new HashMap<>();
        // try-with-resources: previously the ZipFile leaked when an entry failed to parse.
        try (ZipFile zipIn = new ZipFile(file)) {
            Enumeration<? extends ZipEntry> entries = zipIn.entries();
            while (entries.hasMoreElements()) {
                ZipEntry ent = entries.nextElement();
                if (ent.getName().endsWith(".class")) {
                    ClassReader reader = new ClassReader(zipIn.getInputStream(ent));
                    ClassNode node = new ClassNode();
                    // BUGFIX: honor skipCode — the original evaluated (skipCode ? 0 : 0), a no-op.
                    reader.accept(node, (skipCode ? ClassReader.SKIP_CODE : 0) | ClassReader.SKIP_DEBUG | ClassReader.SKIP_FRAMES);
                    map.put(node.name, node);
                    setConstantPool(node, new ConstantPool(reader));
                }
            }
        }
        return map;
    }
    public Map<String, ClassNode> getLibraries() {
        return libraries;
    }
    /**
     * Loads all configured path and library jars. Directories are scanned (non-recursively)
     * for {@code *.jar}. Path entries are loaded without method bodies; libraries keep them.
     */
    private void loadClasspath() throws IOException {
        if (configuration.getPath() != null) {
            for (File file : configuration.getPath()) {
                if (file.isFile()) {
                    classpath.putAll(loadClasspathFile(file, true));
                } else {
                    File[] files = file.listFiles(child -> child.getName().endsWith(".jar"));
                    if (files != null) {
                        for (File child : files) {
                            classpath.putAll(loadClasspathFile(child, true));
                        }
                    }
                }
            }
        }
        if (configuration.getLibraries() != null) {
            for (File file : configuration.getLibraries()) {
                if (file.isFile()) {
                    libraries.putAll(loadClasspathFile(file, false));
                } else {
                    File[] files = file.listFiles(child -> child.getName().endsWith(".jar"));
                    if (files != null) {
                        for (File child : files) {
                            libraries.putAll(loadClasspathFile(child, false));
                        }
                    }
                }
            }
        }
        // Libraries participate in name resolution and are remembered for isLibrary().
        classpath.putAll(libraries);
        libraryClassnodes.addAll(classpath.values());
    }
    /** Returns {@code true} when the class name matches any configured ignore regex. */
    private boolean isClassIgnored(ClassNode classNode) {
        if (configuration.getIgnoredClasses() == null) {
            return false;
        }
        for (String ignored : configuration.getIgnoredClasses()) {
            Pattern pattern;
            try {
                pattern = Pattern.compile(ignored);
            } catch (PatternSyntaxException e) {
                // A broken user pattern must not abort loading; report it and move on.
                logger.error("Error while compiling pattern for ignore statement {}", ignored, e);
                continue;
            }
            Matcher matcher = pattern.matcher(classNode.name);
            if (matcher.find()) {
                return true;
            }
        }
        return false;
    }
    /** Loads every non-directory entry of the configured input jar. */
    private void loadInput() throws IOException {
        try (ZipFile zipIn = new ZipFile(configuration.getInput())) {
            Enumeration<? extends ZipEntry> e = zipIn.entries();
            while (e.hasMoreElements()) {
                ZipEntry next = e.nextElement();
                if (next.isDirectory()) {
                    continue;
                }
                byte[] data = IOUtils.toByteArray(zipIn.getInputStream(next));
                loadInput(next.getName(), data);
            }
            // Input classes must also be resolvable by name during transformation.
            classpath.putAll(classes);
        }
    }
    /**
     * Registers a single input entry. Parsable, non-ignored classes go into {@link #classes}
     * (with JSR/RET subroutines inlined); everything else is passed through to the output unchanged.
     *
     * @param name entry name inside the jar
     * @param data raw entry bytes
     */
    public void loadInput(String name, byte[] data) {
        boolean passthrough = true;
        if (name.endsWith(".class")) {
            try {
                ClassReader reader = new ClassReader(data);
                ClassNode node = new ClassNode();
                reader.accept(node, ClassReader.SKIP_FRAMES);
                readers.put(node, reader);
                setConstantPool(node, new ConstantPool(reader));
                if (!isClassIgnored(node)) {
                    // Inline JSR/RET up front; ClassWriter.COMPUTE_FRAMES cannot handle subroutines.
                    for (int i = 0; i < node.methods.size(); i++) {
                        MethodNode methodNode = node.methods.get(i);
                        JSRInlinerAdapter adapter = new JSRInlinerAdapter(methodNode, methodNode.access, methodNode.name, methodNode.desc, methodNode.signature, methodNode.exceptions.toArray(new String[0]));
                        methodNode.accept(adapter);
                        node.methods.set(i, adapter);
                    }
                    classes.put(node.name, node);
                    passthrough = false;
                } else {
                    // Ignored classes stay resolvable but are not transformed (and are passed through).
                    classpath.put(node.name, node);
                }
            } catch (IllegalArgumentException x) {
                logger.error("Could not parse {} (is it a class file?)", name, x);
            }
        }
        if (passthrough) {
            inputPassthrough.put(name, data);
        }
    }
    /**
     * @deprecated do we need this?
     */
    @Deprecated
    private void computeCallers() {
        // NOTE(review): the computed map is never consumed — candidate for removal.
        Map<MethodNode, List<Entry<ClassNode, MethodNode>>> callers = new HashMap<>();
        classes.values().forEach(classNode -> {
            classNode.methods.forEach(methodNode -> {
                for (int i = 0; i < methodNode.instructions.size(); i++) {
                    AbstractInsnNode node = methodNode.instructions.get(i);
                    if (node instanceof MethodInsnNode) {
                        MethodInsnNode mn = (MethodInsnNode) node;
                        ClassNode targetNode = classes.get(mn.owner);
                        if (targetNode != null) {
                            MethodNode targetMethod = targetNode.methods.stream().filter(m -> m.name.equals(mn.name) && m.desc.equals(mn.desc)).findFirst().orElse(null);
                            if (targetMethod != null) {
                                callers.computeIfAbsent(targetMethod, k -> new ArrayList<>()).add(new SimpleEntry<>(classNode, methodNode));
                            }
                        }
                    }
                }
            });
        });
    }
    /** Returns {@code true} when the node came from the classpath/libraries rather than the input. */
    public boolean isLibrary(ClassNode classNode) {
        return libraryClassnodes.contains(classNode);
    }
    /**
     * Runs the whole pipeline: load, (optionally) detect known obfuscators, transform, write.
     * In detection mode ({@code getConfig().isDetect()}) no output jar is produced.
     */
    public void start() throws Throwable {
        logger.info("Loading classpath");
        loadClasspath();
        logger.info("Loading input");
        loadInput();
        if (getConfig().isDetect()) {
            logger.info("Detecting known obfuscators");
            for (Rule rule : Rules.RULES) {
                String message = rule.test(this);
                if (message == null) {
                    continue;
                }
                logger.info("");
                logger.info("{}: {}", rule.getClass().getSimpleName(), rule.getDescription());
                logger.info("\t{}", message);
                logger.info("Recommend transformers:");
                Collection<Class<? extends Transformer>> recommended = rule.getRecommendTransformers();
                if (recommended == null) {
                    logger.info("\tNone");
                } else {
                    for (Class<? extends Transformer> transformer : recommended) {
                        logger.info("\t{}", transformer.getName());
                    }
                }
            }
            return;
        }
        logger.info("Computing callers");
        computeCallers();
        logger.info("Transforming");
        if (configuration.getTransformers() != null) {
            for (TransformerConfig config : configuration.getTransformers()) {
                logger.info("Running {}", config.getImplementation().getCanonicalName());
                runFromConfig(config);
            }
        }
        logger.info("Writing");
        if (DEBUG) {
            classes.values().forEach(Utils::printClass);
        }
        // try-with-resources: the output stream previously leaked (and the zip stayed
        // unfinished) if anything threw mid-write.
        try (ZipOutputStream zipOut = new ZipOutputStream(new FileOutputStream(configuration.getOutput()))) {
            inputPassthrough.forEach((name, val) -> {
                ZipEntry entry = new ZipEntry(name);
                try {
                    zipOut.putNextEntry(entry);
                    zipOut.write(val);
                    zipOut.closeEntry();
                } catch (IOException e) {
                    logger.error("Error writing entry {}", name, e);
                }
            });
            classes.values().forEach(classNode -> {
                try {
                    byte[] b = toByteArray(classNode);
                    if (b != null) {
                        zipOut.putNextEntry(new ZipEntry(classNode.name + ".class"));
                        zipOut.write(b);
                        zipOut.closeEntry();
                    }
                } catch (IOException e) {
                    logger.error("Error writing entry {}", classNode.name, e);
                }
            });
        }
    }
    /**
     * Instantiates and runs one transformer, repeating while it reports changes
     * (when "smart redo" is enabled in the configuration).
     *
     * @return whether the transformer made changes at least once
     */
    public boolean runFromConfig(TransformerConfig config) throws Throwable {
        Transformer transformer = config.getImplementation().newInstance();
        transformer.init(this, config, classes, classpath, readers);
        boolean madeChangesAtLeastOnce = false;
        boolean madeChanges;
        do {
            madeChanges = transformer.transform();
            madeChangesAtLeastOnce = madeChangesAtLeastOnce || madeChanges;
        } while (madeChanges && getConfig().isSmartRedo());
        return madeChangesAtLeastOnce;
    }
    /**
     * Resolves a class by internal name.
     *
     * @throws NoClassInPathException if the name is not on the classpath
     */
    public ClassNode assureLoaded(String ref) {
        ClassNode clazz = classpath.get(ref);
        if (clazz == null) {
            throw new NoClassInPathException(ref);
        }
        return clazz;
    }
    /**
     * Resolves {@code ref}; when it is missing, removes the referencing class instead of
     * failing (used only when {@link #DELETE_USELESS_CLASSES} is enabled).
     *
     * @return the resolved class, or {@code null} if the referencer was dropped
     */
    public ClassNode assureLoadedElseRemove(String referencer, String ref) {
        ClassNode clazz = classpath.get(ref);
        if (clazz == null) {
            classes.remove(referencer);
            classpath.remove(referencer);
            return null;
        }
        return clazz;
    }
    /** Builds the hierarchy for every input class, transitively following supertypes. */
    public void loadHierachy() {
        Set<String> processed = new HashSet<>();
        LinkedList<ClassNode> toLoad = new LinkedList<>();
        toLoad.addAll(this.classes.values());
        while (!toLoad.isEmpty()) {
            for (ClassNode toProcess : loadHierachy(toLoad.poll())) {
                if (processed.add(toProcess.name)) {
                    toLoad.add(toProcess);
                }
            }
        }
    }
    /** Builds the hierarchy reachable from a single class, transitively. */
    public void loadHierachyAll(ClassNode classNode) {
        Set<String> processed = new HashSet<>();
        LinkedList<ClassNode> toLoad = new LinkedList<>();
        toLoad.add(classNode);
        while (!toLoad.isEmpty()) {
            for (ClassNode toProcess : loadHierachy(toLoad.poll())) {
                if (processed.add(toProcess.name)) {
                    toLoad.add(toProcess);
                }
            }
        }
    }
    /** Discards all cached hierarchy information (e.g. after classes were renamed/removed). */
    public void resetHierachy() {
        this.hierachy.clear();
    }
    private ClassTree getOrCreateClassTree(String name) {
        return this.hierachy.computeIfAbsent(name, ClassTree::new);
    }
    /**
     * Records the direct supertype/interface edges of one class and returns the
     * supertypes that still need processing.
     */
    public List<ClassNode> loadHierachy(ClassNode specificNode) {
        if (specificNode.name.equals("java/lang/Object")) {
            return Collections.emptyList();
        }
        if ((specificNode.access & Opcodes.ACC_INTERFACE) != 0) {
            // Interfaces are treated as direct children of Object for hierarchy purposes.
            getOrCreateClassTree(specificNode.name).parentClasses.add("java/lang/Object");
            return Collections.emptyList();
        }
        List<ClassNode> toProcess = new ArrayList<>();
        ClassTree thisTree = getOrCreateClassTree(specificNode.name);
        ClassNode superClass;
        if (DELETE_USELESS_CLASSES) {
            superClass = assureLoadedElseRemove(specificNode.name, specificNode.superName);
            if (superClass == null) {
                // It got removed
                return toProcess;
            }
        } else {
            superClass = assureLoaded(specificNode.superName);
        }
        // Defensive: assureLoaded throws rather than returning null, so this should be unreachable.
        if (superClass == null) {
            throw new IllegalArgumentException("Could not load " + specificNode.name);
        }
        ClassTree superTree = getOrCreateClassTree(superClass.name);
        superTree.subClasses.add(specificNode.name);
        thisTree.parentClasses.add(superClass.name);
        toProcess.add(superClass);
        for (String interfaceReference : specificNode.interfaces) {
            ClassNode interfaceNode;
            if (DELETE_USELESS_CLASSES) {
                interfaceNode = assureLoadedElseRemove(specificNode.name, interfaceReference);
                if (interfaceNode == null) {
                    // It got removed
                    return toProcess;
                }
            } else {
                interfaceNode = assureLoaded(interfaceReference);
            }
            if (interfaceNode == null) {
                throw new IllegalArgumentException("Could not load " + interfaceReference);
            }
            ClassTree interfaceTree = getOrCreateClassTree(interfaceReference);
            interfaceTree.subClasses.add(specificNode.name);
            thisTree.parentClasses.add(interfaceReference);
            toProcess.add(interfaceNode);
        }
        return toProcess;
    }
    /**
     * Breadth-first search down from {@code possibleParent} for {@code possibleChild}.
     * A class is considered a subclass of itself.
     */
    public boolean isSubclass(String possibleParent, String possibleChild) {
        if (possibleParent.equals(possibleChild)) {
            return true;
        }
        loadHierachyAll(assureLoaded(possibleParent));
        loadHierachyAll(assureLoaded(possibleChild));
        ClassTree parentTree = hierachy.get(possibleParent);
        if (parentTree != null && hierachy.get(possibleChild) != null) {
            List<String> layer = new ArrayList<>();
            layer.add(possibleParent);
            layer.addAll(parentTree.subClasses);
            while (!layer.isEmpty()) {
                if (layer.contains(possibleChild)) {
                    return true;
                }
                List<String> clone = new ArrayList<>(layer);
                layer.clear();
                for (String r : clone) {
                    ClassTree tree = hierachy.get(r);
                    if (tree != null) {
                        layer.addAll(tree.subClasses);
                    }
                }
            }
        }
        return false;
    }
    /** Returns the hierarchy node for a class, building the hierarchy on demand. */
    public ClassTree getClassTree(String classNode) {
        ClassTree tree = hierachy.get(classNode);
        if (tree == null) {
            loadHierachyAll(assureLoaded(classNode));
            return getClassTree(classNode);
        }
        return tree;
    }
    /**
     * Serializes a class, preferring COMPUTE_FRAMES and falling back to COMPUTE_MAXS for
     * classes that cannot be frame-computed (missing supertypes, JSR/RET remnants, ASM bugs).
     * When verification is enabled, the result is re-read through {@code CheckClassAdapter}.
     */
    public byte[] toByteArray(ClassNode node) {
        if (node.innerClasses != null) {
            node.innerClasses.stream().filter(in -> in.innerName != null).forEach(in -> {
                if (in.innerName.indexOf('/') != -1) {
                    in.innerName = in.innerName.substring(in.innerName.lastIndexOf('/') + 1); //Stringer
                }
            });
        }
        ClassWriter writer = new CustomClassWriter(ClassWriter.COMPUTE_FRAMES);
        try {
            node.accept(writer);
        } catch (Throwable e) {
            if (e instanceof NoClassInPathException) {
                NoClassInPathException ex = (NoClassInPathException) e;
                System.out.println("Error: " + ex.getClassName() + " could not be found while writing " + node.name + ". Using COMPUTE_MAXS");
                writer = new CustomClassWriter(ClassWriter.COMPUTE_MAXS);
                node.accept(writer);
            } else if (e instanceof NegativeArraySizeException || e instanceof ArrayIndexOutOfBoundsException) {
                System.out.println("Error: failed to compute frames");
                writer = new CustomClassWriter(ClassWriter.COMPUTE_MAXS);
                node.accept(writer);
            } else if (e.getMessage() != null) {
                if (e.getMessage().contains("JSR/RET")) {
                    System.out.println("ClassNode contained JSR/RET so COMPUTE_MAXS instead");
                    writer = new CustomClassWriter(ClassWriter.COMPUTE_MAXS);
                    node.accept(writer);
                } else {
                    System.out.println("Error while writing " + node.name);
                    e.printStackTrace(System.out);
                }
            } else {
                System.out.println("Error while writing " + node.name);
                e.printStackTrace(System.out);
            }
        }
        byte[] classBytes = writer.toByteArray();
        if (configuration.isVerify()) {
            ClassReader cr = new ClassReader(classBytes);
            try {
                cr.accept(new CheckClassAdapter(new ClassWriter(0)), 0);
            } catch (Throwable t) {
                System.out.println("Error: " + node.name + " failed verification");
                t.printStackTrace(System.out);
            }
        }
        return classBytes;
    }
    public Configuration getConfig() {
        return this.configuration;
    }
    public Map<String, ClassNode> getClasses() {
        return this.classes;
    }
    public Map<ClassNode, ClassReader> getReaders() {
        return readers;
    }
    /**
     * A {@link ClassWriter} whose common-superclass computation uses this deobfuscator's
     * own classpath/hierarchy instead of reflective class loading.
     */
    public class CustomClassWriter extends ClassWriter {
        public CustomClassWriter(int flags) {
            super(flags);
        }
        @Override
        protected String getCommonSuperClass(String type1, String type2) {
            return getCommonSuperClass1(type1, type2);
        }
        private String getCommonSuperClass1(String type1, String type2) {
            if (type1.equals("java/lang/Object") || type2.equals("java/lang/Object")) {
                return "java/lang/Object";
            }
            // Try both directions before walking up the superclass chains.
            String a = getCommonSuperClass0(type1, type2);
            String b = getCommonSuperClass0(type2, type1);
            if (!a.equals("java/lang/Object")) {
                return a;
            }
            if (!b.equals("java/lang/Object")) {
                return b;
            }
            ClassNode first = assureLoaded(type1);
            ClassNode second = assureLoaded(type2);
            return getCommonSuperClass(first.superName, second.superName);
        }
        private String getCommonSuperClass0(String type1, String type2) {
            ClassNode first = assureLoaded(type1);
            ClassNode second = assureLoaded(type2);
            if (isAssignableFrom(type1, type2)) {
                return type1;
            } else if (isAssignableFrom(type2, type1)) {
                return type2;
            } else if (Modifier.isInterface(first.access) || Modifier.isInterface(second.access)) {
                return "java/lang/Object";
            } else {
                // Walk type1's superclass chain until it is assignable from type2.
                do {
                    type1 = first.superName;
                    first = assureLoaded(type1);
                } while (!isAssignableFrom(type1, type2));
                return type1;
            }
        }
        private boolean isAssignableFrom(String type1, String type2) {
            if (type1.equals("java/lang/Object")) {
                return true;
            }
            if (type1.equals(type2)) {
                return true;
            }
            assureLoaded(type1);
            assureLoaded(type2);
            // type1 is assignable from type2 iff type2 appears among type1's transitive subclasses.
            ClassTree firstTree = getClassTree(type1);
            Set<String> allChilds1 = new HashSet<>();
            LinkedList<String> toProcess = new LinkedList<>();
            toProcess.addAll(firstTree.subClasses);
            while (!toProcess.isEmpty()) {
                String s = toProcess.poll();
                if (allChilds1.add(s)) {
                    assureLoaded(s);
                    ClassTree tempTree = getClassTree(s);
                    toProcess.addAll(tempTree.subClasses);
                }
            }
            return allChilds1.contains(type2);
        }
    }
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.idea.svn.auth;
import com.intellij.concurrency.JobScheduler;
import com.intellij.notification.NotificationType;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.MessageType;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.ui.popup.Balloon;
import com.intellij.openapi.ui.popup.JBPopupFactory;
import com.intellij.openapi.util.NamedRunnable;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vcs.impl.GenericNotifierImpl;
import com.intellij.openapi.vcs.ui.VcsBalloonProblemNotifier;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.wm.IdeFrame;
import com.intellij.openapi.wm.ex.WindowManagerEx;
import com.intellij.ui.awt.RelativePoint;
import com.intellij.util.Consumer;
import com.intellij.util.ThreeState;
import com.intellij.util.net.HttpConfigurable;
import com.intellij.util.proxy.CommonProxy;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import org.jetbrains.idea.svn.*;
import org.jetbrains.idea.svn.api.ClientFactory;
import org.jetbrains.idea.svn.commandLine.SvnBindException;
import org.jetbrains.idea.svn.info.Info;
import org.jetbrains.idea.svn.info.InfoClient;
import org.tmatesoft.svn.core.SVNAuthenticationException;
import org.tmatesoft.svn.core.SVNCancelException;
import org.tmatesoft.svn.core.SVNException;
import org.tmatesoft.svn.core.SVNURL;
import org.tmatesoft.svn.core.auth.ISVNAuthenticationManager;
import org.tmatesoft.svn.core.auth.SVNAuthentication;
import org.tmatesoft.svn.core.internal.util.SVNURLUtil;
import org.tmatesoft.svn.core.wc.SVNRevision;
import org.tmatesoft.svn.core.wc2.SvnTarget;
import javax.swing.*;
import java.awt.*;
import java.io.File;
import java.io.FilenameFilter;
import java.net.*;
import java.util.*;
import java.util.List;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
public class SvnAuthenticationNotifier extends GenericNotifierImpl<SvnAuthenticationNotifier.AuthenticationRequest, SVNURL> {
private static final Logger LOG = Logger.getInstance(SvnAuthenticationNotifier.class);
private static final List<String> ourAuthKinds = Arrays.asList(ISVNAuthenticationManager.PASSWORD, ISVNAuthenticationManager.SSH,
ISVNAuthenticationManager.SSL, ISVNAuthenticationManager.USERNAME, "svn.ssl.server", "svn.ssh.server");
private final SvnVcs myVcs;
private final RootsToWorkingCopies myRootsToWorkingCopies;
private final Map<SVNURL, Boolean> myCopiesPassiveResults;
private ScheduledFuture<?> myTimer;
private volatile boolean myVerificationInProgress;
public SvnAuthenticationNotifier(final SvnVcs svnVcs) {
super(svnVcs.getProject(), svnVcs.getDisplayName(), "Not Logged In to Subversion", NotificationType.ERROR);
myVcs = svnVcs;
myRootsToWorkingCopies = myVcs.getRootsToWorkingCopies();
myCopiesPassiveResults = Collections.synchronizedMap(new HashMap<SVNURL, Boolean>());
myVerificationInProgress = false;
}
public void init() {
if (myTimer != null) {
stop();
}
myTimer =
// every 10 minutes
JobScheduler.getScheduler().scheduleWithFixedDelay(myCopiesPassiveResults::clear, 10, 10 * 60, TimeUnit.SECONDS);
}
public void stop() {
myTimer.cancel(false);
myTimer = null;
}
  /**
   * Triggers interactive credential validation for the request; at most one validation
   * runs at a time — concurrent calls just get an "Already checking..." balloon.
   *
   * <p>NOTE(review): when validation is dispatched to a pooled thread this method returns
   * {@code false} immediately; success is only reflected later via
   * {@link #onStateChangedToSuccess}. In the synchronous branch, if the checker throws
   * before setting {@code resultRef}, {@code resultRef.get()} unboxes {@code null} — verify.
   */
  @Override
  protected boolean ask(final AuthenticationRequest obj, String description) {
    if (myVerificationInProgress) {
      return showAlreadyChecking();
    }
    myVerificationInProgress = true;
    final Ref<Boolean> resultRef = new Ref<>();
    final Runnable checker = new Runnable() {
      @Override
      public void run() {
        try {
          final boolean result =
            interactiveValidation(obj.myProject, obj.getUrl(), obj.getRealm(), obj.getKind());
          log("ask result for: " + obj.getUrl() + " is: " + result);
          resultRef.set(result);
          if (result) {
            // Success clears related notifications and caches the positive verdict.
            onStateChangedToSuccess(obj);
          }
        }
        finally {
          // Always release the single-flight guard, even if validation threw.
          myVerificationInProgress = false;
        }
      }
    };
    final Application application = ApplicationManager.getApplication();
    // also do not show auth if thread does not have progress indicator
    if (application.isReadAccessAllowed() || !ProgressManager.getInstance().hasProgressIndicator()) {
      // Never block the EDT / a read action: validate on a pooled thread, report "not yet".
      application.executeOnPooledThread(checker);
    }
    else {
      // Background task with progress: safe to validate synchronously and return the result.
      checker.run();
      return resultRef.get();
    }
    return false;
  }
private boolean showAlreadyChecking() {
final IdeFrame frameFor = WindowManagerEx.getInstanceEx().findFrameFor(myProject);
if (frameFor != null) {
final JComponent component = frameFor.getComponent();
Point point = component.getMousePosition();
if (point == null) {
point = new Point((int)(component.getWidth() * 0.7), 0);
}
SwingUtilities.convertPointToScreen(point, component);
JBPopupFactory.getInstance().createHtmlTextBalloonBuilder("Already checking...", MessageType.WARNING, null).
createBalloon().show(new RelativePoint(point), Balloon.Position.below);
}
return false;
}
private void onStateChangedToSuccess(final AuthenticationRequest obj) {
myCopiesPassiveResults.put(getKey(obj), true);
myVcs.invokeRefreshSvnRoots();
final List<SVNURL> outdatedRequests = new LinkedList<>();
final Collection<SVNURL> keys = getAllCurrentKeys();
for (SVNURL key : keys) {
final SVNURL commonURLAncestor = SVNURLUtil.getCommonURLAncestor(key, obj.getUrl());
if ((commonURLAncestor != null) && (! StringUtil.isEmptyOrSpaces(commonURLAncestor.getHost())) &&
(! StringUtil.isEmptyOrSpaces(commonURLAncestor.getPath()))) {
//final AuthenticationRequest currObj = getObj(key);
//if ((currObj != null) && passiveValidation(myVcs.getProject(), key, true, currObj.getRealm(), currObj.getKind())) {
outdatedRequests.add(key);
//}
}
}
log("on state changed ");
ApplicationManager.getApplication().invokeLater(new Runnable() {
@Override
public void run() {
for (SVNURL key : outdatedRequests) {
removeLazyNotificationByKey(key);
}
}
}, ModalityState.NON_MODAL);
}
  /**
   * Forgets any cached passive-validation verdict for this working copy before
   * delegating to the base class to (re-)raise the "not logged in" notification.
   */
  @Override
  public boolean ensureNotify(AuthenticationRequest obj) {
    final SVNURL key = getKey(obj);
    // The cached verdict is stale once a new authentication request arrives.
    myCopiesPassiveResults.remove(key);
    /*VcsBalloonProblemNotifier.showOverChangesView(myVcs.getProject(), "You are not authenticated to '" + obj.getRealm() + "'." +
    "To login, see pending notifications.", MessageType.ERROR);*/
    return super.ensureNotify(obj);
  }
  /**
   * Prompts immediately only when running under a progress indicator (i.e. from a
   * background task); otherwise the user must click the notification to log in.
   */
  @Override
  protected boolean onFirstNotification(AuthenticationRequest obj) {
    if (ProgressManager.getInstance().hasProgressIndicator()) {
      return ask(obj, null); // TODO
    } else {
      return false;
    }
  }
  /**
   * Notifications are keyed by the working copy's URL, not the requested URL.
   */
  @Nonnull
  @Override
  public SVNURL getKey(final AuthenticationRequest obj) {
    // !!! wc's URL
    return obj.getWcUrl();
  }
@Nullable
public SVNURL getWcUrl(final AuthenticationRequest obj) {
if (obj.isOutsideCopies()) return null;
if (obj.getWcUrl() != null) return obj.getWcUrl();
final WorkingCopy copy = myRootsToWorkingCopies.getMatchingCopy(obj.getUrl());
if (copy != null) {
obj.setOutsideCopies(false);
obj.setWcUrl(copy.getUrl());
} else {
obj.setOutsideCopies(true);
}
return copy == null ? null : copy.getUrl();
}
/**
* Bases on presence of notifications!
*/
public ThreeState isAuthenticatedFor(@Nonnull VirtualFile vf, @Nullable ClientFactory factory) {
final WorkingCopy wcCopy = myRootsToWorkingCopies.getWcRoot(vf);
if (wcCopy == null) return ThreeState.UNSURE;
// check there's no cancellation yet
final boolean haveCancellation = getStateFor(wcCopy.getUrl());
if (haveCancellation) return ThreeState.NO;
final Boolean keptResult = myCopiesPassiveResults.get(wcCopy.getUrl());
if (Boolean.TRUE.equals(keptResult)) return ThreeState.YES;
if (Boolean.FALSE.equals(keptResult)) return ThreeState.NO;
// check have credentials
final boolean calculatedResult =
factory == null ? passiveValidation(myVcs.getProject(), wcCopy.getUrl()) : passiveValidation(factory, wcCopy.getUrl());
myCopiesPassiveResults.put(wcCopy.getUrl(), calculatedResult);
return calculatedResult ? ThreeState.YES : ThreeState.NO;
}
private static boolean passiveValidation(@Nonnull ClientFactory factory, @Nonnull SVNURL url) {
Info info = null;
try {
info = factory.create(InfoClient.class, false).doInfo(SvnTarget.fromURL(url), SVNRevision.UNDEFINED);
}
catch (SvnBindException ignore) {
}
return info != null;
}
  /**
   * Builds the HTML body of the notification. The empty-href anchor is presumably handled
   * by the notifier's hyperlink listener — TODO confirm against the base class.
   */
  @Nonnull
  @Override
  protected String getNotificationContent(AuthenticationRequest obj) {
    return "<a href=\"\">Click to fix.</a> Not logged In to Subversion '" + obj.getRealm() + "' (" + obj.getUrl().toDecodedString() + ")";
  }
  /**
   * One pending authentication request: immutable identity (project, kind, URL, realm)
   * plus mutable working-copy resolution state filled in lazily by
   * {@code SvnAuthenticationNotifier#getWcUrl}.
   */
  public static class AuthenticationRequest {
    private final Project myProject;
    // Authentication kind string (e.g. one of the ISVNAuthenticationManager constants).
    private final String myKind;
    private final SVNURL myUrl;
    private final String myRealm;
    // URL of the matching working copy; resolved lazily, null until then.
    private SVNURL myWcUrl;
    // True when the URL was determined to lie outside all known working copies.
    private boolean myOutsideCopies;
    private boolean myForceSaving;
    public AuthenticationRequest(Project project, String kind, SVNURL url, String realm) {
      myProject = project;
      myKind = kind;
      myUrl = url;
      myRealm = realm;
    }
    public boolean isForceSaving() {
      return myForceSaving;
    }
    public void setForceSaving(boolean forceSaving) {
      myForceSaving = forceSaving;
    }
    public boolean isOutsideCopies() {
      return myOutsideCopies;
    }
    public void setOutsideCopies(boolean outsideCopies) {
      myOutsideCopies = outsideCopies;
    }
    public SVNURL getWcUrl() {
      return myWcUrl;
    }
    public void setWcUrl(SVNURL wcUrl) {
      myWcUrl = wcUrl;
    }
    public String getKind() {
      return myKind;
    }
    public SVNURL getUrl() {
      return myUrl;
    }
    public String getRealm() {
      return myRealm;
    }
  }
  // Debug-level logging helper for tracing authentication failures.
  static void log(final Throwable t) {
    LOG.debug(t);
  }
  // Debug-level logging helper for tracing the authentication flow.
  static void log(final String s) {
    LOG.debug(s);
  }
public static boolean passiveValidation(final Project project, final SVNURL url) {
final SvnConfiguration configuration = SvnConfiguration.getInstance(project);
final SvnAuthenticationManager passiveManager = configuration.getPassiveAuthenticationManager(project);
return validationImpl(project, url, configuration, passiveManager, false, null, null, false);
}
public static boolean interactiveValidation(final Project project, final SVNURL url, final String realm, final String kind) {
final SvnConfiguration configuration = SvnConfiguration.getInstance(project);
final SvnAuthenticationManager passiveManager = configuration.getInteractiveManager(SvnVcs.getInstance(project));
return validationImpl(project, url, configuration, passiveManager, true, realm, kind, true);
}
  /**
   * Shared validation routine: runs an {@code info} call on {@code url} through the given
   * authentication manager and reports whether credentials are (or became) valid.
   *
   * @param checkWrite  when {@code true}, additionally ensures credentials are acquired and
   *                    acknowledged (not merely that a read succeeded)
   * @param interactive when {@code true}, the manager may prompt; failures surface hot-fix UI
   * @return {@code true} when validation succeeded; {@code false} on any failure or cancellation
   */
  private static boolean validationImpl(final Project project, final SVNURL url,
                                        final SvnConfiguration configuration, final SvnAuthenticationManager manager,
                                        final boolean checkWrite,
                                        final String realm,
                                        final String kind, boolean interactive) {
    // we should also NOT show proxy credentials dialog if at least fixed proxy was used, so
    Proxy proxyToRelease = null;
    if (! interactive && configuration.isIsUseDefaultProxy()) {
      final HttpConfigurable instance = HttpConfigurable.getInstance();
      // A configured proxy that itself needs (missing) credentials cannot be validated passively.
      if (instance.USE_HTTP_PROXY && instance.PROXY_AUTHENTICATION && (StringUtil.isEmptyOrSpaces(instance.getProxyLogin()) ||
                                                                      StringUtil.isEmptyOrSpaces(instance.getPlainProxyPassword()))) {
        return false;
      }
      if (instance.USE_PROXY_PAC) {
        final List<Proxy> select;
        try {
          select = CommonProxy.getInstance().select(new URI(url.toString()));
        }
        catch (URISyntaxException e) {
          LOG.info("wrong URL: " + url.toString());
          return false;
        }
        if (select != null && ! select.isEmpty()) {
          for (Proxy proxy : select) {
            if (HttpConfigurable.isRealProxy(proxy) && Proxy.Type.HTTP.equals(proxy.type())) {
              final InetSocketAddress address = (InetSocketAddress)proxy.address();
              final PasswordAuthentication password =
                HttpConfigurable.getInstance().getGenericPassword(address.getHostName(), address.getPort());
              if (password == null) {
                // Suppress the proxy-credentials dialog for this host:port during the passive probe;
                // restored in the finally block below.
                CommonProxy.getInstance().noAuthentication("http", address.getHostName(), address.getPort());
                proxyToRelease = proxy;
              }
            }
          }
        }
      }
    }
    SvnInteractiveAuthenticationProvider.clearCallState();
    try {
      // start svnkit authentication cycle
      SvnVcs.getInstance(project).getSvnKitManager().createWCClient(manager).doInfo(url, SVNRevision.UNDEFINED, SVNRevision.HEAD);
      //SvnVcs.getInstance(project).getInfo(url, SVNRevision.HEAD, manager);
    } catch (SVNAuthenticationException | SVNCancelException e) {
      log(e);
      return false;
    } catch (final SVNException e) {
      if (e.getErrorMessage().getErrorCode().isAuthentication()) {
        log(e);
        return false;
      }
      LOG.info("some other exc", e);
      if (interactive) {
        showAuthenticationFailedWithHotFixes(project, configuration, e);
      }
      return false; /// !!!! any exception means user should be notified that authorization failed
    } finally {
      if (! interactive && configuration.isIsUseDefaultProxy() && proxyToRelease != null) {
        final InetSocketAddress address = (InetSocketAddress)proxyToRelease.address();
        CommonProxy.getInstance().noAuthentication("http", address.getHostName(), address.getPort());
      }
    }
    if (! checkWrite) {
      return true;
    }
    /*if (passive) {
      return SvnInteractiveAuthenticationProvider.wasCalled();
    }*/
    // The provider was invoked during the cycle: cancelled means failure, answered means success.
    if (SvnInteractiveAuthenticationProvider.wasCalled() && SvnInteractiveAuthenticationProvider.wasCancelled()) return false;
    if (SvnInteractiveAuthenticationProvider.wasCalled()) return true;
    // The cycle succeeded without prompting: request credentials once explicitly and
    // acknowledge them so they are stored for subsequent operations.
    final SvnVcs svnVcs = SvnVcs.getInstance(project);
    final SvnInteractiveAuthenticationProvider provider = new SvnInteractiveAuthenticationProvider(svnVcs, manager);
    final SVNAuthentication svnAuthentication = provider.requestClientAuthentication(kind, url, realm, null, null, true);
    if (svnAuthentication != null) {
      configuration.acknowledge(kind, realm, svnAuthentication);
      try {
        configuration.getAuthenticationManager(svnVcs).acknowledgeAuthentication(true, kind, realm, null, svnAuthentication);
      }
      catch (SVNException e) {
        LOG.info(e);
      }
      return true;
    }
    return false;
  }
/**
 * Shows a balloon over the Changes view telling the user that SVN authentication
 * failed, offering two "hot fix" actions: clearing the stored authentication
 * cache, or selecting a different SVN configuration directory.
 *
 * @param project       project whose Changes view hosts the balloon
 * @param configuration SVN configuration used to resolve / update the config directory
 * @param e             the failure; its message is shown verbatim in the balloon
 */
private static void showAuthenticationFailedWithHotFixes(final Project project,
                                                         final SvnConfiguration configuration,
                                                         final SVNException e) {
  // UI must be touched on the EDT; the disposed-condition prevents running after project close.
  ApplicationManager.getApplication().invokeLater(new Runnable() {
    @Override
    public void run() {
      VcsBalloonProblemNotifier.showOverChangesView(project, "Authentication failed: " + e.getMessage(), MessageType.ERROR,
        // Hot fix 1: wipe the on-disk credential cache (asks the user for confirmation first).
        new NamedRunnable(
          SvnBundle.message("confirmation.title.clear.authentication.cache")) {
          @Override
          public void run() {
            // null component => the confirmation dialog is parented on the project frame
            clearAuthenticationCache(project, null, configuration
              .getConfigurationDirectory());
          }
        },
        // Hot fix 2: let the user point the plugin at another SVN configuration directory.
        new NamedRunnable(SvnBundle.message("action.title.select.configuration.directory")) {
          @Override
          public void run() {
            SvnConfigurable
              .selectConfigurationDirectory(configuration.getConfigurationDirectory(),
                new Consumer<String>() {
                  @Override
                  public void consume(String s) {
                    // false => the chosen directory is user-specific, not the default
                    configuration
                      .setConfigurationDirParameters(false, s);
                  }
                }, project, null);
          }
        }
      );
    }
  }, ModalityState.NON_MODAL, project.getDisposed());
}
/**
 * Asks the user for confirmation and, if confirmed, clears both the in-memory
 * runtime authentication cache and the on-disk authentication directory.
 *
 * @param project       project used to resolve the SVN configuration and, when
 *                      {@code component} is null, to parent the confirmation dialog
 * @param component     optional parent component for the confirmation dialog
 * @param configDirPath when null, the method does nothing
 */
public static void clearAuthenticationCache(@Nonnull final Project project, final Component component, final String configDirPath) {
  if (configDirPath == null) return;

  final String text = SvnBundle.message("confirmation.text.delete.stored.authentication.information");
  final String title = SvnBundle.message("confirmation.title.clear.authentication.cache");
  // Parent the dialog on the supplied component when available, otherwise on the project frame.
  final int answer = component == null
                     ? Messages.showYesNoDialog(project, text, title, Messages.getWarningIcon())
                     : Messages.showYesNoDialog(component, text, title, Messages.getWarningIcon());
  if (answer == Messages.YES) {
    SvnConfiguration.RUNTIME_AUTH_CACHE.clear();
    clearAuthenticationDirectory(SvnConfiguration.getInstance(project));
  }
}
/**
 * Deletes the stored-credential subdirectories (the known auth kinds) under
 * {@code <configurationDirectory>/auth}.
 * <p>
 * On a background/unit-test thread the deletion runs inline; on the EDT it is
 * wrapped in a synchronous modal progress task.
 *
 * @param configuration SVN configuration whose directory is cleared
 */
public static void clearAuthenticationDirectory(@Nonnull SvnConfiguration configuration) {
  final File authDir = new File(configuration.getConfigurationDirectory(), "auth");
  if (authDir.exists()) {
    final Runnable process = new Runnable() {
      @Override
      public void run() {
        final ProgressIndicator ind = ProgressManager.getInstance().getProgressIndicator();
        if (ind != null) {
          ind.setIndeterminate(true);
          ind.setText("Clearing stored credentials in " + authDir.getAbsolutePath());
        }
        // Only the well-known credential kind subdirectories are deleted.
        final File[] files = authDir.listFiles(new FilenameFilter() {
          @Override
          public boolean accept(@Nonnull File dir, @Nonnull String name) {
            return ourAuthKinds.contains(name);
          }
        });
        // listFiles() returns null on an I/O error or if the directory vanished
        // between the exists() check and this call — previously this NPE'd.
        if (files == null) return;
        for (File dir : files) {
          if (ind != null) {
            ind.setText("Deleting " + dir.getAbsolutePath());
          }
          FileUtil.delete(dir);
        }
      }
    };
    final Application application = ApplicationManager.getApplication();
    if (application.isUnitTestMode() || !application.isDispatchThread()) {
      process.run();
    }
    else {
      ProgressManager.getInstance()
        .runProcessWithProgressSynchronously(process, "button.text.clear.authentication.cache", false, configuration.getProject());
    }
  }
}
}
| |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package services;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;

import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.EntityNotFoundException;
import javax.persistence.Query;
import javax.persistence.TypedQuery;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Root;

import model.College;
import model.Office;
import model.School;
import services.exceptions.NonexistentEntityException;
/**
*
* @author jmatar
*/
/**
 * JPA CRUD controller for {@link School} entities.
 * <p>
 * Each operation obtains a fresh {@link EntityManager}, runs inside its own
 * transaction, and closes the manager in a {@code finally} block. The
 * relationship bookkeeping maintains both sides of the School associations:
 * the many-to-one link to {@link College} (the college keeps a school list)
 * and the one-to-many link to {@link Office} (each office points back at its
 * school). A transaction still active when an operation unwinds (i.e. a
 * failure before commit) is rolled back before the manager is closed, so no
 * open transaction leaks out of this class.
 */
public class SchoolJpaController implements Serializable {

    public SchoolJpaController(EntityManagerFactory emf) {
        this.emf = emf;
    }

    /** Factory from which a fresh EntityManager is created per operation. */
    private EntityManagerFactory emf = null;

    /** @return a new EntityManager; the caller is responsible for closing it */
    public EntityManager getEntityManager() {
        return emf.createEntityManager();
    }

    /**
     * Rolls back {@code em}'s transaction if it is still active. Called from
     * {@code finally} blocks so a failure before commit does not leave an open
     * transaction behind when the manager is closed.
     */
    private static void rollbackIfActive(EntityManager em) {
        if (em.getTransaction().isActive()) {
            em.getTransaction().rollback();
        }
    }

    /**
     * Persists {@code school} and wires up both relationship sides: resolves
     * the referenced college and offices through the persistence context, adds
     * the school to its college's school list, and repoints each office's
     * school reference — detaching an office from its previous school when it
     * is being moved here.
     *
     * @param school the new entity; a null office list is treated as empty
     */
    public void create(School school) {
        if (school.getOfficeList() == null) {
            school.setOfficeList(new ArrayList<Office>());
        }
        EntityManager em = null;
        try {
            em = getEntityManager();
            em.getTransaction().begin();
            College collegeID = school.getCollegeID();
            if (collegeID != null) {
                // Re-resolve through this persistence context so the reference is managed.
                collegeID = em.getReference(collegeID.getClass(), collegeID.getId());
                school.setCollegeID(collegeID);
            }
            List<Office> attachedOfficeList = new ArrayList<Office>();
            for (Office officeListOfficeToAttach : school.getOfficeList()) {
                officeListOfficeToAttach = em.getReference(officeListOfficeToAttach.getClass(), officeListOfficeToAttach.getId());
                attachedOfficeList.add(officeListOfficeToAttach);
            }
            school.setOfficeList(attachedOfficeList);
            em.persist(school);
            if (collegeID != null) {
                collegeID.getSchoolList().add(school);
                collegeID = em.merge(collegeID);
            }
            for (Office officeListOffice : school.getOfficeList()) {
                School oldSchoolIDOfOfficeListOffice = officeListOffice.getSchoolID();
                officeListOffice.setSchoolID(school);
                officeListOffice = em.merge(officeListOffice);
                if (oldSchoolIDOfOfficeListOffice != null) {
                    // The office moved here from another school: drop it from the old owner.
                    oldSchoolIDOfOfficeListOffice.getOfficeList().remove(officeListOffice);
                    oldSchoolIDOfOfficeListOffice = em.merge(oldSchoolIDOfOfficeListOffice);
                }
            }
            em.getTransaction().commit();
        } finally {
            if (em != null) {
                rollbackIfActive(em);
                em.close();
            }
        }
    }

    /**
     * Updates {@code school} and reconciles both relationship sides against
     * the previously persisted state: moves the school between colleges if its
     * college changed, clears the school reference on offices removed from its
     * office list, and attaches newly added offices (stealing them from their
     * former school when necessary).
     *
     * @throws NonexistentEntityException if the school no longer exists
     * @throws Exception any other persistence failure, rethrown after rollback
     */
    public void edit(School school) throws NonexistentEntityException, Exception {
        EntityManager em = null;
        try {
            em = getEntityManager();
            em.getTransaction().begin();
            School persistentSchool = em.find(School.class, school.getId());
            College collegeIDOld = persistentSchool.getCollegeID();
            College collegeIDNew = school.getCollegeID();
            List<Office> officeListOld = persistentSchool.getOfficeList();
            List<Office> officeListNew = school.getOfficeList();
            if (collegeIDNew != null) {
                collegeIDNew = em.getReference(collegeIDNew.getClass(), collegeIDNew.getId());
                school.setCollegeID(collegeIDNew);
            }
            List<Office> attachedOfficeListNew = new ArrayList<Office>();
            for (Office officeListNewOfficeToAttach : officeListNew) {
                officeListNewOfficeToAttach = em.getReference(officeListNewOfficeToAttach.getClass(), officeListNewOfficeToAttach.getId());
                attachedOfficeListNew.add(officeListNewOfficeToAttach);
            }
            officeListNew = attachedOfficeListNew;
            school.setOfficeList(officeListNew);
            school = em.merge(school);
            // College changed: keep both colleges' school lists consistent.
            if (collegeIDOld != null && !collegeIDOld.equals(collegeIDNew)) {
                collegeIDOld.getSchoolList().remove(school);
                collegeIDOld = em.merge(collegeIDOld);
            }
            if (collegeIDNew != null && !collegeIDNew.equals(collegeIDOld)) {
                collegeIDNew.getSchoolList().add(school);
                collegeIDNew = em.merge(collegeIDNew);
            }
            // Offices dropped from the list no longer belong to this school.
            for (Office officeListOldOffice : officeListOld) {
                if (!officeListNew.contains(officeListOldOffice)) {
                    officeListOldOffice.setSchoolID(null);
                    officeListOldOffice = em.merge(officeListOldOffice);
                }
            }
            // Newly added offices point here; detach them from any former school.
            for (Office officeListNewOffice : officeListNew) {
                if (!officeListOld.contains(officeListNewOffice)) {
                    School oldSchoolIDOfOfficeListNewOffice = officeListNewOffice.getSchoolID();
                    officeListNewOffice.setSchoolID(school);
                    officeListNewOffice = em.merge(officeListNewOffice);
                    if (oldSchoolIDOfOfficeListNewOffice != null && !oldSchoolIDOfOfficeListNewOffice.equals(school)) {
                        oldSchoolIDOfOfficeListNewOffice.getOfficeList().remove(officeListNewOffice);
                        oldSchoolIDOfOfficeListNewOffice = em.merge(oldSchoolIDOfOfficeListNewOffice);
                    }
                }
            }
            em.getTransaction().commit();
        } catch (Exception ex) {
            // A blank message usually means the entity vanished concurrently; translate
            // that case into a NonexistentEntityException, otherwise rethrow as-is.
            String msg = ex.getLocalizedMessage();
            if (msg == null || msg.length() == 0) {
                Integer id = school.getId();
                if (findSchool(id) == null) {
                    throw new NonexistentEntityException("The school with id " + id + " no longer exists.");
                }
            }
            throw ex;
        } finally {
            if (em != null) {
                rollbackIfActive(em);
                em.close();
            }
        }
    }

    /**
     * Deletes the school with the given id, first unlinking it from its
     * college's school list and clearing the back-reference on its offices.
     *
     * @throws NonexistentEntityException if no school with that id exists
     */
    public void destroy(Integer id) throws NonexistentEntityException {
        EntityManager em = null;
        try {
            em = getEntityManager();
            em.getTransaction().begin();
            School school;
            try {
                school = em.getReference(School.class, id);
                school.getId(); // force the lazy reference to load; throws if the row is gone
            } catch (EntityNotFoundException enfe) {
                throw new NonexistentEntityException("The school with id " + id + " no longer exists.", enfe);
            }
            College collegeID = school.getCollegeID();
            if (collegeID != null) {
                collegeID.getSchoolList().remove(school);
                collegeID = em.merge(collegeID);
            }
            List<Office> officeList = school.getOfficeList();
            for (Office officeListOffice : officeList) {
                officeListOffice.setSchoolID(null);
                officeListOffice = em.merge(officeListOffice);
            }
            em.remove(school);
            em.getTransaction().commit();
        } finally {
            if (em != null) {
                rollbackIfActive(em);
                em.close();
            }
        }
    }

    /** @return all schools */
    public List<School> findSchoolEntities() {
        return findSchoolEntities(true, -1, -1);
    }

    /**
     * @param maxResults  page size
     * @param firstResult zero-based offset of the first result
     * @return one page of schools
     */
    public List<School> findSchoolEntities(int maxResults, int firstResult) {
        return findSchoolEntities(false, maxResults, firstResult);
    }

    /** Shared query body; {@code all} bypasses the paging parameters. */
    private List<School> findSchoolEntities(boolean all, int maxResults, int firstResult) {
        EntityManager em = getEntityManager();
        try {
            // Typed criteria query — avoids the raw-type/unchecked warnings of the
            // previous raw CriteriaQuery/Query pair.
            CriteriaQuery<School> cq = em.getCriteriaBuilder().createQuery(School.class);
            cq.select(cq.from(School.class));
            TypedQuery<School> q = em.createQuery(cq);
            if (!all) {
                q.setMaxResults(maxResults);
                q.setFirstResult(firstResult);
            }
            return q.getResultList();
        } finally {
            em.close();
        }
    }

    /** @return the school with the given id, or null if none exists */
    public School findSchool(Integer id) {
        EntityManager em = getEntityManager();
        try {
            return em.find(School.class, id);
        } finally {
            em.close();
        }
    }

    /** @return the total number of schools */
    public int getSchoolCount() {
        EntityManager em = getEntityManager();
        try {
            CriteriaBuilder cb = em.getCriteriaBuilder();
            CriteriaQuery<Long> cq = cb.createQuery(Long.class);
            Root<School> rt = cq.from(School.class);
            cq.select(cb.count(rt));
            return em.createQuery(cq).getSingleResult().intValue();
        } finally {
            em.close();
        }
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.