gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
package com.github.anno4j.querying;
import com.github.anno4j.model.Annotation;
import com.github.anno4j.model.namespaces.*;
import com.github.anno4j.querying.evaluation.EvalQuery;
import com.hp.hpl.jena.query.Query;
import org.apache.jena.atlas.io.IndentedWriter;
import org.apache.marmotta.ldpath.parser.ParseException;
import org.openrdf.model.URI;
import org.openrdf.model.vocabulary.OWL;
import org.openrdf.model.vocabulary.RDFS;
import org.openrdf.model.vocabulary.SKOS;
import org.openrdf.query.MalformedQueryException;
import org.openrdf.query.QueryEvaluationException;
import org.openrdf.repository.RepositoryException;
import org.openrdf.repository.object.ObjectConnection;
import org.openrdf.repository.object.ObjectQuery;
import org.openrdf.repository.object.ObjectRepository;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* The QueryService allows to query triple stores by using criteria. Furthermore
* this is provided by simple classes. This is why the user does not need to write SPARQL queries
* by himself.
*
* @param <T>
* @author Andreas Eisenkolb
*/
/**
 * The QueryService allows to query triple stores by using criteria. Furthermore
 * this is provided by simple classes. This is why the user does not need to write SPARQL queries
 * by himself.
 *
 * @param <T> the annotation type contained in the result set
 * @author Andreas Eisenkolb
 */
public class QueryService<T extends Annotation> {
    private final Logger logger = LoggerFactory.getLogger(QueryService.class);

    /** Optional named graph; when non-null, restricts the connection's read/insert/remove contexts. */
    private final URI graph;

    /**
     * The type of the result set.
     */
    private final Class<T> type;

    /**
     * The repository needed for the actual querying
     */
    private final ObjectRepository objectRepository;

    /**
     * LDPath prefix for the shortcut method setBodyCriteria.
     *
     * Notice: Storing the path without the slash "/", because the passed LDPath expression can look like
     * this : "[is-a ex:exampleType]". Appending the slash unconditionally would construct the wrong
     * path "oa:hasBody/[is-a ex:exampleType]"; see {@link #buildCriteriaPath(String, String)}.
     */
    private static final String BODY_PREFIX = "oa:hasBody";

    /**
     * LDPath prefix for the shortcut method setTargetCriteria; also the base of the
     * source and selector prefixes below.
     */
    private static final String TARGET_PREFIX = "oa:hasTarget/";

    /**
     * LDPath prefix for the shortcut method setSourceCriteria
     * (stored without a trailing slash; same reasoning as BODY_PREFIX).
     */
    private static final String SOURCE_PREFIX = TARGET_PREFIX + "oa:hasSource";

    /**
     * LDPath prefix for the shortcut method setSelectorCriteria
     * (stored without a trailing slash; same reasoning as BODY_PREFIX).
     */
    private static final String SELECTOR_PREFIX = TARGET_PREFIX + "oa:hasSelector";

    /**
     * All user defined name spaces
     */
    private final Map<String, String> prefixes = new HashMap<String, String>();

    /**
     * All user defined criteria
     */
    private final List<Criteria> criteria = new ArrayList<Criteria>();

    /**
     * Specifies the ordering of the result set
     * TODO: evaluate if this is possible with the current implementation, because the user does not know the generated variable names etc...
     */
    private Order order = null;

    /**
     * Limit value of the query
     */
    private Integer limit = null;

    /**
     * Offset value for the query
     */
    private Integer offset = null;

    /**
     * Object to apply optimization strategies to SPARQL queries
     */
    private final QueryOptimizer queryOptimizer;

    /**
     * Required to have an ongoing variable name when creating the SPARQL query
     */
    private int varIndex = 0;

    /**
     * Creates a QueryService that queries all graphs of the repository.
     *
     * @param type             the class of the annotations to query for
     * @param objectRepository the repository to run the query against
     */
    public QueryService(Class<T> type, ObjectRepository objectRepository) {
        this(type, objectRepository, null);
    }

    /**
     * Creates a QueryService optionally restricted to a single named graph.
     *
     * @param type             the class of the annotations to query for
     * @param objectRepository the repository to run the query against
     * @param graph            the named graph to query, or null to query all graphs
     */
    public QueryService(Class<T> type, ObjectRepository objectRepository, URI graph) {
        this.type = type;
        this.objectRepository = objectRepository;
        // Setting some standard name spaces
        addPrefix(OADM.PREFIX, OADM.NS);
        addPrefix(CNT.PREFIX, CNT.NS);
        addPrefix(DC.PREFIX, DC.NS);
        addPrefix(DCTERMS.PREFIX, DCTERMS.NS);
        addPrefix(DCTYPES.PREFIX, DCTYPES.NS);
        addPrefix(FOAF.PREFIX, FOAF.NS);
        addPrefix(PROV.PREFIX, PROV.NS);
        addPrefix(RDF.PREFIX, RDF.NS);
        addPrefix(OWL.PREFIX, OWL.NAMESPACE);
        addPrefix(RDFS.PREFIX, RDFS.NAMESPACE);
        addPrefix(SKOS.PREFIX, SKOS.NAMESPACE);
        this.queryOptimizer = QueryOptimizer.getInstance();
        this.graph = graph;
    }

    /**
     * Joins an LDPath prefix with a user supplied LDPath expression. Expressions starting with
     * "[" (e.g. "[is-a ex:exampleType]") attach directly to the prefix, while plain path segments
     * are separated by a slash.
     *
     * @param prefix the LDPath prefix (without trailing slash)
     * @param ldpath the user supplied LDPath expression
     * @return the combined LDPath expression
     */
    private static String buildCriteriaPath(String prefix, String ldpath) {
        return ldpath.startsWith("[") ? prefix + ldpath : prefix + "/" + ldpath;
    }

    /**
     * Setting a criteria for filtering eu.mico.platform.persistence.impl.impl.* objects.
     *
     * @param ldpath     Syntax similar to XPath. Beginning from the Body object
     * @param comparison The comparison mode, e.g. Comparison.EQ (=)
     * @param value      The constraint value
     * @return itself to allow chaining.
     */
    public QueryService<T> setBodyCriteria(String ldpath, String value, Comparison comparison) {
        criteria.add(new Criteria(buildCriteriaPath(BODY_PREFIX, ldpath), value, comparison));
        return this;
    }

    /**
     * Setting a criteria for filtering eu.mico.platform.persistence.impl.impl.* objects.
     *
     * @param ldpath     Syntax similar to XPath. Beginning from the Body object
     * @param comparison The comparison mode, e.g. Comparison.EQ (=)
     * @param value      The constraint value
     * @return itself to allow chaining.
     */
    public QueryService<T> setBodyCriteria(String ldpath, Number value, Comparison comparison) {
        criteria.add(new Criteria(buildCriteriaPath(BODY_PREFIX, ldpath), value, comparison));
        return this;
    }

    /**
     * Setting a criteria for filtering eu.mico.platform.persistence.impl.impl.* objects. Compared to the
     * other <i>setBodyCriteria</i> function, this function does not need a <b>Comparison</b> statement. Hence,
     * the Comparison.EQ statement ("=") will be used automatically.
     *
     * @param ldpath Syntax similar to XPath. Beginning from the Body object
     * @param value  The constraint value
     * @return itself to allow chaining.
     */
    public QueryService<T> setBodyCriteria(String ldpath, String value) {
        return setBodyCriteria(ldpath, value, Comparison.EQ);
    }

    /**
     * Setting a criteria for filtering eu.mico.platform.persistence.impl.impl.* objects. Compared to the
     * other <i>setBodyCriteria</i> function, this function does not need a <b>Comparison</b> statement. Hence,
     * the Comparison.EQ statement ("=") will be used automatically.
     *
     * @param ldpath Syntax similar to XPath. Beginning from the Body object
     * @param value  The constraint value
     * @return itself to allow chaining.
     */
    public QueryService<T> setBodyCriteria(String ldpath, Number value) {
        return setBodyCriteria(ldpath, value, Comparison.EQ);
    }

    /**
     * Setting a criteria for filtering eu.mico.platform.persistence.impl.impl.* objects.
     *
     * @param ldpath Syntax similar to XPath. Beginning from the Body object
     * @return itself to allow chaining.
     */
    public QueryService<T> setBodyCriteria(String ldpath) {
        criteria.add(new Criteria(buildCriteriaPath(BODY_PREFIX, ldpath), Comparison.EQ));
        return this;
    }

    /**
     * Setting a criteria for filtering eu.mico.platform.persistence.impl.AnnotationImpl objects.
     *
     * @param ldpath     Syntax similar to XPath. Beginning from the Annotation object
     * @param comparison The comparison mode, e.g. Comparison.EQ (=)
     * @param value      The constraint value
     * @return itself to allow chaining.
     */
    public QueryService<T> setAnnotationCriteria(String ldpath, String value, Comparison comparison) {
        criteria.add(new Criteria(ldpath, value, comparison));
        return this;
    }

    /**
     * Setting a criteria for filtering eu.mico.platform.persistence.impl.AnnotationImpl objects.
     *
     * @param ldpath     Syntax similar to XPath. Beginning from the Annotation object
     * @param comparison The comparison mode, e.g. Comparison.EQ (=)
     * @param value      The constraint value
     * @return itself to allow chaining.
     */
    public QueryService<T> setAnnotationCriteria(String ldpath, Number value, Comparison comparison) {
        criteria.add(new Criteria(ldpath, value, comparison));
        return this;
    }

    /**
     * Setting a criteria for filtering eu.mico.platform.persistence.impl.AnnotationImpl objects. Compared to the
     * other <i>setAnnotationCriteria</i> function, this function does not need a Comparison statement. Hence, the
     * <b>Comparison.EQ</b> statement ("=") will be used automatically.
     *
     * @param ldpath Syntax similar to XPath. Beginning from the Annotation object
     * @param value  The constraint value
     * @return itself to allow chaining.
     */
    public QueryService<T> setAnnotationCriteria(String ldpath, String value) {
        return setAnnotationCriteria(ldpath, value, Comparison.EQ);
    }

    /**
     * Setting a criteria for filtering eu.mico.platform.persistence.impl.AnnotationImpl objects. Compared to the
     * other <i>setAnnotationCriteria</i> function, this function does not need a Comparison statement. Hence, the
     * <b>Comparison.EQ</b> statement ("=") will be used automatically.
     *
     * @param ldpath Syntax similar to XPath. Beginning from the Annotation object
     * @param value  The constraint value
     * @return itself to allow chaining.
     */
    public QueryService<T> setAnnotationCriteria(String ldpath, Number value) {
        return setAnnotationCriteria(ldpath, value, Comparison.EQ);
    }

    /**
     * Setting a criteria for filtering eu.mico.platform.persistence.impl.AnnotationImpl objects.
     *
     * @param ldpath Syntax similar to XPath. Beginning from the Annotation object
     * @return itself to allow chaining.
     */
    public QueryService<T> setAnnotationCriteria(String ldpath) {
        criteria.add(new Criteria(ldpath, Comparison.EQ));
        return this;
    }

    /**
     * Setting a criteria for filtering eu.mico.platform.persistence.impl.selector.* objects.
     *
     * @param ldpath     Syntax similar to XPath. Beginning from the Selector object
     * @param comparison The comparison mode, e.g. Comparison.EQ (=)
     * @param value      The constraint value
     * @return itself to allow chaining.
     */
    public QueryService<T> setSelectorCriteria(String ldpath, String value, Comparison comparison) {
        criteria.add(new Criteria(buildCriteriaPath(SELECTOR_PREFIX, ldpath), value, comparison));
        return this;
    }

    /**
     * Setting a criteria for filtering eu.mico.platform.persistence.impl.selector.* objects.
     *
     * @param ldpath     Syntax similar to XPath. Beginning from the Selector object
     * @param comparison The comparison mode, e.g. Comparison.EQ (=)
     * @param value      The constraint value
     * @return itself to allow chaining.
     */
    public QueryService<T> setSelectorCriteria(String ldpath, Number value, Comparison comparison) {
        criteria.add(new Criteria(buildCriteriaPath(SELECTOR_PREFIX, ldpath), value, comparison));
        return this;
    }

    /**
     * Setting a criteria for filtering eu.mico.platform.persistence.impl.selector.* objects. Compared to the
     * other <i>setSelectorCriteria</i> function, this function does not need a Comparison statement. Hence, the
     * <b>Comparison.EQ</b> statement ("=") will be used automatically.
     *
     * @param ldpath Syntax similar to XPath. Beginning from the Selector object
     * @param value  The constraint value
     * @return itself to allow chaining.
     */
    public QueryService<T> setSelectorCriteria(String ldpath, String value) {
        return setSelectorCriteria(ldpath, value, Comparison.EQ);
    }

    /**
     * Setting a criteria for filtering eu.mico.platform.persistence.impl.selector.* objects. Compared to the
     * other <i>setSelectorCriteria</i> function, this function does not need a Comparison statement. Hence, the
     * <b>Comparison.EQ</b> statement ("=") will be used automatically.
     *
     * @param ldpath Syntax similar to XPath. Beginning from the Selector object
     * @param value  The constraint value
     * @return itself to allow chaining.
     */
    public QueryService<T> setSelectorCriteria(String ldpath, Number value) {
        return setSelectorCriteria(ldpath, value, Comparison.EQ);
    }

    /**
     * Setting a criteria for filtering eu.mico.platform.persistence.impl.selector.* objects.
     *
     * @param ldpath Syntax similar to XPath. Beginning from the Selector object
     * @return itself to allow chaining.
     */
    public QueryService<T> setSelectorCriteria(String ldpath) {
        criteria.add(new Criteria(buildCriteriaPath(SELECTOR_PREFIX, ldpath), Comparison.EQ));
        return this;
    }

    /**
     * Setting a criteria for filtering on the source of a target.
     *
     * @param ldpath     Syntax similar to XPath. Beginning from the Source object
     * @param value      The constraint value
     * @param comparison The comparison mode, e.g. Comparison.EQ (=)
     * @return itself to allow chaining.
     */
    public QueryService<T> setSourceCriteria(String ldpath, String value, Comparison comparison) {
        criteria.add(new Criteria(buildCriteriaPath(SOURCE_PREFIX, ldpath), value, comparison));
        return this;
    }

    /**
     * Setting a criteria for filtering on the source of a target.
     *
     * @param ldpath     Syntax similar to XPath. Beginning from the Source object
     * @param value      The constraint value
     * @param comparison The comparison mode, e.g. Comparison.EQ (=)
     * @return itself to allow chaining.
     */
    public QueryService<T> setSourceCriteria(String ldpath, Number value, Comparison comparison) {
        criteria.add(new Criteria(buildCriteriaPath(SOURCE_PREFIX, ldpath), value, comparison));
        return this;
    }

    /**
     * Setting a criteria for filtering on the source of a target, using Comparison.EQ ("=").
     *
     * @param ldpath Syntax similar to XPath. Beginning from the Source object
     * @param value  The constraint value
     * @return itself to allow chaining.
     */
    public QueryService<T> setSourceCriteria(String ldpath, String value) {
        return setSourceCriteria(ldpath, value, Comparison.EQ);
    }

    /**
     * Setting a criteria for filtering on the source of a target, using Comparison.EQ ("=").
     *
     * @param ldpath Syntax similar to XPath. Beginning from the Source object
     * @param value  The constraint value
     * @return itself to allow chaining.
     */
    public QueryService<T> setSourceCriteria(String ldpath, Number value) {
        return setSourceCriteria(ldpath, value, Comparison.EQ);
    }

    /**
     * Setting a criteria for filtering on the source of a target.
     *
     * @param ldpath Syntax similar to XPath. Beginning from the Source object
     * @return itself to allow chaining.
     */
    public QueryService<T> setSourceCriteria(String ldpath) {
        criteria.add(new Criteria(buildCriteriaPath(SOURCE_PREFIX, ldpath), Comparison.EQ));
        return this;
    }

    /**
     * Adding a criteria object to the QueryService
     *
     * @param criteria The criteria object
     * @return itself to allow chaining.
     */
    public QueryService<T> addCriteriaObject(Criteria criteria) {
        this.criteria.add(criteria);
        return this;
    }

    /**
     * Setting shortcut names for URI prefixes.
     *
     * @param label The label of the namespace, e.g. foaf
     * @param url   The URL
     * @return itself to allow chaining.
     */
    public QueryService<T> addPrefix(String label, String url) {
        this.prefixes.put(label, url);
        return this;
    }

    /**
     * Setting multiple names for URI prefixes.
     *
     * @param prefixes HashMap with multiple namespaces.
     * @return itself to allow chaining.
     */
    public QueryService<T> addPrefixes(HashMap<String, String> prefixes) {
        this.prefixes.putAll(prefixes);
        return this;
    }

    /**
     * Defines the ordering of the result set.
     *
     * @param order Defines the order of the result set.
     * @return itself to allow chaining.
     */
    public QueryService<T> orderBy(Order order) {
        this.order = order;
        return this;
    }

    /**
     * Setting the limit value.
     *
     * @param limit The limit value.
     * @return itself to allow chaining.
     */
    public QueryService<T> limit(Integer limit) {
        this.limit = limit;
        return this;
    }

    /**
     * Setting the offset value.
     *
     * @param offset The offset value.
     * @return itself to allow chaining.
     */
    public QueryService<T> offset(Integer offset) {
        this.offset = offset;
        return this;
    }

    /**
     * Creates and executes the SPARQL query according to the
     * criteria specified by the user.
     *
     * @return the result set
     * @throws ParseException           if an LDPath criteria expression cannot be parsed
     * @throws RepositoryException      if the connection to the repository fails
     * @throws MalformedQueryException  if the generated SPARQL query is invalid
     * @throws QueryEvaluationException if the query cannot be evaluated
     */
    @SuppressWarnings("unchecked")
    public List<T> execute() throws ParseException, RepositoryException, MalformedQueryException, QueryEvaluationException {
        ObjectConnection con = objectRepository.getConnection();
        if (graph != null) {
            // Restrict all operations on this connection to the configured named graph.
            con.setReadContexts(graph);
            con.setInsertContext(graph);
            con.setRemoveContexts(graph);
        }
        Query sparql = EvalQuery.evaluate(criteria, prefixes);
        if (limit != null) {
            sparql.setLimit(limit);
        }
        if (offset != null) {
            sparql.setOffset(offset);
        }
        String q = sparql.serialize();
        logger.debug("Created query:\n{}", queryOptimizer.prettyPrint(q));
        // Optimize the join order
        q = queryOptimizer.optimizeJoinOrder(q);
        logger.debug("Join order optimized:\n {}", q);
        // Optimize the FILTER placement
        q = queryOptimizer.optimizeFilters(q);
        logger.debug("FILTERs optimized:\n {}", q);
        ObjectQuery query = con.prepareObjectQuery(q);
        if (query.getDataset() != null) {
            logger.info("\nGRAPH CONTEXT = {}\nFINAL QUERY :\n{}", query.getDataset().getDefaultGraphs(), q);
        } else {
            logger.info("\nFINAL QUERY :\n{}", q);
        }
        return (List<T>) query.evaluate(this.type).asList();
    }
}
| |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1/model_service.proto
package com.google.cloud.aiplatform.v1;
/**
*
*
* <pre>
* Details of [ModelService.UploadModel][google.cloud.aiplatform.v1.ModelService.UploadModel] operation.
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.UploadModelOperationMetadata}
*/
public final class UploadModelOperationMetadata extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.UploadModelOperationMetadata)
UploadModelOperationMetadataOrBuilder {
private static final long serialVersionUID = 0L;
// Use UploadModelOperationMetadata.newBuilder() to construct.
private UploadModelOperationMetadata(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// No-arg constructor: all fields keep their default values (genericMetadata_ stays null).
private UploadModelOperationMetadata() {}
// Reflective instantiation hook used by the protobuf runtime; always returns a fresh empty message.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UploadModelOperationMetadata();
}
// Fields that were present on the wire but are not part of this generated schema.
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor (legacy, pre reflection-based parsing protobuf-java style).
// Reads tagged fields from `input` until end of message; unrecognized tags are preserved
// in unknownFields rather than dropped.
private UploadModelOperationMetadata(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
// Tag 0 signals end of the input stream / message.
done = true;
break;
case 10:
{
// Tag 10 = field number 1 (generic_metadata), wire type 2 (length-delimited message).
// If the field was already seen, merge the new value into the existing one per proto3 semantics.
com.google.cloud.aiplatform.v1.GenericOperationMetadata.Builder subBuilder = null;
if (genericMetadata_ != null) {
subBuilder = genericMetadata_.toBuilder();
}
genericMetadata_ =
input.readMessage(
com.google.cloud.aiplatform.v1.GenericOperationMetadata.parser(),
extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(genericMetadata_);
genericMetadata_ = subBuilder.buildPartial();
}
break;
}
default:
{
// Unknown field: keep its bytes; parseUnknownField returns false at end of group/input.
if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
} finally {
// Always finalize unknown fields, even when parsing fails partway through.
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Descriptor for this message type, as registered in ModelServiceProto.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1.ModelServiceProto
.internal_static_google_cloud_aiplatform_v1_UploadModelOperationMetadata_descriptor;
}
// Maps proto field names to the generated Java accessors for reflection support.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1.ModelServiceProto
.internal_static_google_cloud_aiplatform_v1_UploadModelOperationMetadata_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1.UploadModelOperationMetadata.class,
com.google.cloud.aiplatform.v1.UploadModelOperationMetadata.Builder.class);
}
public static final int GENERIC_METADATA_FIELD_NUMBER = 1;
// Backing field for generic_metadata; null means "not set" (see hasGenericMetadata()).
private com.google.cloud.aiplatform.v1.GenericOperationMetadata genericMetadata_;
/**
 *
 *
 * <pre>
 * The common part of the operation metadata.
 * </pre>
 *
 * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
 *
 * @return Whether the genericMetadata field is set.
 */
@java.lang.Override
public boolean hasGenericMetadata() {
return genericMetadata_ != null;
}
/**
 *
 *
 * <pre>
 * The common part of the operation metadata.
 * </pre>
 *
 * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
 *
 * @return The genericMetadata.
 */
@java.lang.Override
public com.google.cloud.aiplatform.v1.GenericOperationMetadata getGenericMetadata() {
// Never returns null: the default instance stands in for an unset field.
return genericMetadata_ == null
? com.google.cloud.aiplatform.v1.GenericOperationMetadata.getDefaultInstance()
: genericMetadata_;
}
/**
 *
 *
 * <pre>
 * The common part of the operation metadata.
 * </pre>
 *
 * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
 */
@java.lang.Override
public com.google.cloud.aiplatform.v1.GenericOperationMetadataOrBuilder
getGenericMetadataOrBuilder() {
return getGenericMetadata();
}
// Memoized initialization check: -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
// This message has no required fields, so it is always initialized once computed.
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
// Serializes set fields (and any unknown fields) to the protobuf wire format.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (genericMetadata_ != null) {
output.writeMessage(1, getGenericMetadata());
}
unknownFields.writeTo(output);
}
// Computes (and memoizes) the serialized byte size; -1 marks "not yet computed".
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (genericMetadata_ != null) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getGenericMetadata());
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
// Value equality: same type, same generic_metadata presence/value, same unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.aiplatform.v1.UploadModelOperationMetadata)) {
return super.equals(obj);
}
com.google.cloud.aiplatform.v1.UploadModelOperationMetadata other =
(com.google.cloud.aiplatform.v1.UploadModelOperationMetadata) obj;
if (hasGenericMetadata() != other.hasGenericMetadata()) return false;
if (hasGenericMetadata()) {
if (!getGenericMetadata().equals(other.getGenericMetadata())) return false;
}
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
// Hash code consistent with equals(); memoized (0 means "not yet computed").
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasGenericMetadata()) {
hash = (37 * hash) + GENERIC_METADATA_FIELD_NUMBER;
hash = (53 * hash) + getGenericMetadata().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parseFrom overloads: each delegates to PARSER (or the
// GeneratedMessageV3 IO helpers) for a different input representation.
public static com.google.cloud.aiplatform.v1.UploadModelOperationMetadata parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.UploadModelOperationMetadata parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.UploadModelOperationMetadata parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.UploadModelOperationMetadata parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.UploadModelOperationMetadata parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.UploadModelOperationMetadata parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.UploadModelOperationMetadata parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.UploadModelOperationMetadata parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// "Delimited" variants read a varint length prefix first (see writeDelimitedTo).
public static com.google.cloud.aiplatform.v1.UploadModelOperationMetadata parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.UploadModelOperationMetadata parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.UploadModelOperationMetadata parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.UploadModelOperationMetadata parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Builder factory methods. NOTE(review): DEFAULT_INSTANCE is declared further down in this
// generated file, outside the visible chunk.
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
// Returns a builder pre-populated with the given prototype's field values.
public static Builder newBuilder(
com.google.cloud.aiplatform.v1.UploadModelOperationMetadata prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
// The default instance yields an empty builder; any other instance seeds the builder with itself.
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Details of [ModelService.UploadModel][google.cloud.aiplatform.v1.ModelService.UploadModel] operation.
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.UploadModelOperationMetadata}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.UploadModelOperationMetadata)
com.google.cloud.aiplatform.v1.UploadModelOperationMetadataOrBuilder {
// Same descriptor/accessor table as the message class; required for reflection on builders.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1.ModelServiceProto
.internal_static_google_cloud_aiplatform_v1_UploadModelOperationMetadata_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1.ModelServiceProto
.internal_static_google_cloud_aiplatform_v1_UploadModelOperationMetadata_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1.UploadModelOperationMetadata.class,
com.google.cloud.aiplatform.v1.UploadModelOperationMetadata.Builder.class);
}
// Construct using com.google.cloud.aiplatform.v1.UploadModelOperationMetadata.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates nested field builders only when the runtime requests it (no-op here:
// this message has no fields that need forced initialization).
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
}
// Resets the builder to default state, dropping any nested field builder.
@java.lang.Override
public Builder clear() {
super.clear();
if (genericMetadataBuilder_ == null) {
genericMetadata_ = null;
} else {
genericMetadata_ = null;
genericMetadataBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.aiplatform.v1.ModelServiceProto
.internal_static_google_cloud_aiplatform_v1_UploadModelOperationMetadata_descriptor;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.UploadModelOperationMetadata getDefaultInstanceForType() {
return com.google.cloud.aiplatform.v1.UploadModelOperationMetadata.getDefaultInstance();
}
// build() enforces initialization; this message has no required fields, so the check
// cannot fail in practice, but the generated contract is kept.
@java.lang.Override
public com.google.cloud.aiplatform.v1.UploadModelOperationMetadata build() {
com.google.cloud.aiplatform.v1.UploadModelOperationMetadata result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copies the builder's current field values into a new message without the init check.
@java.lang.Override
public com.google.cloud.aiplatform.v1.UploadModelOperationMetadata buildPartial() {
com.google.cloud.aiplatform.v1.UploadModelOperationMetadata result =
new com.google.cloud.aiplatform.v1.UploadModelOperationMetadata(this);
if (genericMetadataBuilder_ == null) {
result.genericMetadata_ = genericMetadata_;
} else {
result.genericMetadata_ = genericMetadataBuilder_.build();
}
onBuilt();
return result;
}
// The following overrides simply delegate to GeneratedMessageV3.Builder; they exist so the
// generated builder exposes covariant `Builder` return types for fluent chaining.
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
// Dispatches to the typed mergeFrom when possible; otherwise falls back to reflective merge.
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.aiplatform.v1.UploadModelOperationMetadata) {
return mergeFrom((com.google.cloud.aiplatform.v1.UploadModelOperationMetadata) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Merges set fields from `other` into this builder; merging the default instance is a no-op.
public Builder mergeFrom(com.google.cloud.aiplatform.v1.UploadModelOperationMetadata other) {
if (other == com.google.cloud.aiplatform.v1.UploadModelOperationMetadata.getDefaultInstance())
return this;
if (other.hasGenericMetadata()) {
mergeGenericMetadata(other.getGenericMetadata());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
// No required fields, so a builder is always initialized.
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Parses a message from the stream and merges it into this builder. On parse failure the
// partially-read message is still merged (finally block) before the IOException is rethrown.
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.cloud.aiplatform.v1.UploadModelOperationMetadata parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage =
(com.google.cloud.aiplatform.v1.UploadModelOperationMetadata) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
    // Plain field value; meaningful only while genericMetadataBuilder_ is null.
    private com.google.cloud.aiplatform.v1.GenericOperationMetadata genericMetadata_;
    // Lazily created sub-builder; once created it owns the field value and
    // genericMetadata_ is nulled out (see getGenericMetadataFieldBuilder()).
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1.GenericOperationMetadata,
            com.google.cloud.aiplatform.v1.GenericOperationMetadata.Builder,
            com.google.cloud.aiplatform.v1.GenericOperationMetadataOrBuilder>
        genericMetadataBuilder_;
    /**
     * The common part of the operation metadata.
     *
     * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
     *
     * @return Whether the genericMetadata field is set.
     */
    public boolean hasGenericMetadata() {
      return genericMetadataBuilder_ != null || genericMetadata_ != null;
    }
    /**
     * The common part of the operation metadata.
     *
     * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
     *
     * @return The genericMetadata, or the default instance when the field is unset.
     */
    public com.google.cloud.aiplatform.v1.GenericOperationMetadata getGenericMetadata() {
      if (genericMetadataBuilder_ == null) {
        return genericMetadata_ == null
            ? com.google.cloud.aiplatform.v1.GenericOperationMetadata.getDefaultInstance()
            : genericMetadata_;
      } else {
        return genericMetadataBuilder_.getMessage();
      }
    }
    /**
     * Sets the common part of the operation metadata.
     *
     * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
     *
     * @throws NullPointerException if {@code value} is null
     */
    public Builder setGenericMetadata(
        com.google.cloud.aiplatform.v1.GenericOperationMetadata value) {
      if (genericMetadataBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        genericMetadata_ = value;
        onChanged();
      } else {
        genericMetadataBuilder_.setMessage(value);
      }
      return this;
    }
    /**
     * Sets the common part of the operation metadata from a builder
     * (the builder is built immediately).
     *
     * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
     */
    public Builder setGenericMetadata(
        com.google.cloud.aiplatform.v1.GenericOperationMetadata.Builder builderForValue) {
      if (genericMetadataBuilder_ == null) {
        genericMetadata_ = builderForValue.build();
        onChanged();
      } else {
        genericMetadataBuilder_.setMessage(builderForValue.build());
      }
      return this;
    }
    /**
     * Merges {@code value} into any existing field value using proto merge semantics
     * (sub-message fields are merged recursively; scalars are overwritten).
     *
     * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
     */
    public Builder mergeGenericMetadata(
        com.google.cloud.aiplatform.v1.GenericOperationMetadata value) {
      if (genericMetadataBuilder_ == null) {
        if (genericMetadata_ != null) {
          genericMetadata_ =
              com.google.cloud.aiplatform.v1.GenericOperationMetadata.newBuilder(genericMetadata_)
                  .mergeFrom(value)
                  .buildPartial();
        } else {
          genericMetadata_ = value;
        }
        onChanged();
      } else {
        genericMetadataBuilder_.mergeFrom(value);
      }
      return this;
    }
    /**
     * Clears the field back to unset.
     *
     * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
     */
    public Builder clearGenericMetadata() {
      if (genericMetadataBuilder_ == null) {
        genericMetadata_ = null;
        onChanged();
      } else {
        genericMetadata_ = null;
        genericMetadataBuilder_ = null;
      }
      return this;
    }
    /**
     * Returns a mutable builder for the field, creating the sub-builder on first use.
     *
     * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
     */
    public com.google.cloud.aiplatform.v1.GenericOperationMetadata.Builder
        getGenericMetadataBuilder() {
      onChanged();
      return getGenericMetadataFieldBuilder().getBuilder();
    }
    /**
     * Returns a read-only view of the field without forcing sub-builder creation.
     *
     * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
     */
    public com.google.cloud.aiplatform.v1.GenericOperationMetadataOrBuilder
        getGenericMetadataOrBuilder() {
      if (genericMetadataBuilder_ != null) {
        return genericMetadataBuilder_.getMessageOrBuilder();
      } else {
        return genericMetadata_ == null
            ? com.google.cloud.aiplatform.v1.GenericOperationMetadata.getDefaultInstance()
            : genericMetadata_;
      }
    }
    /**
     * Lazily creates the SingleFieldBuilderV3 for generic_metadata. Once created,
     * the plain field is nulled out and the builder owns the current value.
     *
     * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1.GenericOperationMetadata,
            com.google.cloud.aiplatform.v1.GenericOperationMetadata.Builder,
            com.google.cloud.aiplatform.v1.GenericOperationMetadataOrBuilder>
        getGenericMetadataFieldBuilder() {
      if (genericMetadataBuilder_ == null) {
        genericMetadataBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.aiplatform.v1.GenericOperationMetadata,
                com.google.cloud.aiplatform.v1.GenericOperationMetadata.Builder,
                com.google.cloud.aiplatform.v1.GenericOperationMetadataOrBuilder>(
                getGenericMetadata(), getParentForChildren(), isClean());
        genericMetadata_ = null;
      }
      return genericMetadataBuilder_;
    }
    // Generated delegation to the superclass unknown-field handling;
    // overridden only to narrow the return type to this Builder.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.UploadModelOperationMetadata)
}
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.UploadModelOperationMetadata)
  // Singleton default (all fields unset) instance for this message type.
  private static final com.google.cloud.aiplatform.v1.UploadModelOperationMetadata DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.UploadModelOperationMetadata();
  }
  /** Returns the shared immutable default instance. */
  public static com.google.cloud.aiplatform.v1.UploadModelOperationMetadata getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Shared stateless parser for this message type; safe to reuse across threads.
  private static final com.google.protobuf.Parser<UploadModelOperationMetadata> PARSER =
      new com.google.protobuf.AbstractParser<UploadModelOperationMetadata>() {
        @java.lang.Override
        public UploadModelOperationMetadata parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          // Delegates to the parsing constructor generated for this message.
          return new UploadModelOperationMetadata(input, extensionRegistry);
        }
      };
  /** Static accessor for the shared parser. */
  public static com.google.protobuf.Parser<UploadModelOperationMetadata> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<UploadModelOperationMetadata> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.UploadModelOperationMetadata getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
| |
package ua.p2psafety.fragments;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.graphics.Color;
import android.graphics.Typeface;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentTransaction;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.WindowManager;
import android.view.inputmethod.EditorInfo;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;
import android.widget.Toast;
import ua.p2psafety.util.EventManager;
import ua.p2psafety.R;
import ua.p2psafety.SosActivity;
import ua.p2psafety.json.Event;
import ua.p2psafety.services.DelayedSosService;
import ua.p2psafety.util.NetworkManager;
import ua.p2psafety.data.Prefs;
import ua.p2psafety.util.Logs;
import ua.p2psafety.util.Utils;
/**
* Created by ihorpysmennyi on 12/14/13.
*/
/**
 * Main SOS screen. Long-pressing the SOS button starts or stops an alert
 * (stop is optionally guarded by a password); the remaining buttons navigate
 * to the delayed-SOS, passive-SOS, supporter and settings screens.
 */
public class SendMessageFragment extends Fragment {
    // Action/navigation buttons, bound in onCreateView().
    Button mDelayedSosBtn;
    Button mSupportScreenBtn;
    Button mPassiveSosBtn;
    Button mSosBtn;
    Activity mActivity;
    Logs mLogs;
    // Click listener shared by id-dispatched navigation buttons
    // (currently only the delayed-SOS button uses it).
    private View.OnClickListener lsnr = new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            Fragment mfragment;
            FragmentManager mfragmentManager = getFragmentManager();
            FragmentTransaction fragmentTransaction = mfragmentManager.beginTransaction();
            switch (v.getId()) {
                case R.id.delayedSosBtn:
                    // Show the delayed-SOS screen unless it is already attached.
                    mfragment = new DelayedSosFragment();
                    if (!Utils.isFragmentAdded(mfragment, mfragmentManager))
                    {
                        fragmentTransaction.addToBackStack(mfragment.getClass().getName());
                        fragmentTransaction.replace(R.id.content_frame, mfragment).commit();
                    }
                    break;
            }
        }
    };
    public SendMessageFragment() {
        super();
    }
    /**
     * Inflates the layout and wires up all buttons and key handling.
     */
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        mActivity = getActivity();
        mLogs = new Logs(mActivity);
        mLogs.info("SendMessageFragment.onCreateView()");
        ((SosActivity) getActivity()).getSupportActionBar().setHomeButtonEnabled(false);
        ((SosActivity) getActivity()).getSupportActionBar().setDisplayHomeAsUpEnabled(false);
        Typeface font = Typeface.createFromAsset(getActivity().getAssets(), "fonts/RobotoCondensed-Bold.ttf");
        View rootView = inflater.inflate(R.layout.frag_sendmessage, container, false);
        mDelayedSosBtn = (Button) rootView.findViewById(R.id.delayedSosBtn);
        mDelayedSosBtn.setOnClickListener(lsnr);
        mSosBtn = (Button) rootView.findViewById(R.id.button);
        // Long press toggles the SOS state; stopping requires the configured
        // password when one is set.
        mSosBtn.setOnLongClickListener(new View.OnLongClickListener() {
            @Override
            public boolean onLongClick(View v) {
                mLogs.info("SendMessageFragment. SOS button clicked");
                Utils.checkForLocationServices(mActivity);
                EventManager eventManager = EventManager.getInstance(mActivity);
                if (eventManager.isSosStarted()) {
                    mLogs.info("SendMessageFragment. SOS active");
                    if (!Prefs.getUsePassword(mActivity)) {
                        mLogs.info("SendMessageFragment. No password required. Stoping SOS");
                        Utils.setLoading(mActivity, true);
                        eventManager.stopSos();
                        mSosBtn.setText(getString(R.string.sos));
                    } else {
                        mLogs.info("SendMessageFragment. Password required. Asking it");
                        askPasswordAndCancelSos();
                    }
                } else {
                    mLogs.info("SendMessageFragment. SOS not active");
                    // stop delayed SOS if it is on
                    if (DelayedSosService.isTimerOn()) {
                        mLogs.info("SendMessageFragment. Delayed SOS active. Stoping it");
                        mActivity.stopService(new Intent(mActivity, DelayedSosService.class));
                    }
                    if (eventManager.isSupportStarted())
                        mSupportScreenBtn.setBackgroundColor(Color.GRAY);
                    // start normal sos
                    Utils.setLoading(mActivity, true);
                    mLogs.info("SendMessageFragment. Starting SOS");
                    eventManager.startSos();
                    mSosBtn.setText(getResources().getString(R.string.sos_cancel));
                }
                // NOTE(review): returning false does not consume the long press,
                // so follow-up click handling may also fire -- confirm intended.
                return false;
            }
        });
        mSupportScreenBtn = (Button) rootView.findViewById(R.id.SupportScreenBtn);
        // Red background signals that supporter mode is currently active.
        if (EventManager.getInstance(mActivity).isSupportStarted())
            mSupportScreenBtn.setBackgroundColor(getResources().getColor(R.color.SOSRed));
        else
            mSupportScreenBtn.setBackgroundColor(Color.GRAY);
        mSupportScreenBtn.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                // Supporter screen is reachable only while supporter mode is on.
                if (EventManager.getInstance(mActivity).isSupportStarted()) {
                    Fragment fragment = new SupporterFragment();
                    getFragmentManager().beginTransaction()
                            .addToBackStack(null).replace(R.id.content_frame, fragment).commit();
                } else {
                    Toast.makeText(mActivity, R.string.not_supporter_mode, Toast.LENGTH_LONG)
                            .show();
                }
            }
        });
        mPassiveSosBtn = (Button) rootView.findViewById(R.id.btn_passive_sos);
        mPassiveSosBtn.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                mLogs.info("SendMessageFragment. Passive SOS button pressed. Showing PassiveSosFragment");
                getFragmentManager()
                        .beginTransaction()
                        .addToBackStack(null)
                        .replace(R.id.content_frame, new PassiveSosFragment())
                        .commit();
            }
        });
        Button settingsBtn = (Button) rootView.findViewById(R.id.btn_settings);
        settingsBtn.setTypeface(font);
        settingsBtn.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                mLogs.info("SendMessageFragment. Settings button pressed. Showing SettingsFragment");
                Fragment mFragment = new SettingsFragment();
                FragmentManager mFragmentManager = getFragmentManager();
                FragmentTransaction fragmentTransaction = mFragmentManager.beginTransaction();
//                for (int i = 0; i < mFragmentManager.getBackStackEntryCount(); ++i) {
//                    mFragmentManager.popBackStack();
//                }
                fragmentTransaction.addToBackStack("SettingsFragment");
                fragmentTransaction.replace(R.id.content_frame, mFragment).commit();
            }
        });
        ((TextView)rootView.findViewById(R.id.textView)).setTypeface(font);
        // Back key finishes the hosting activity from this screen.
        rootView.setOnKeyListener(new View.OnKeyListener() {
            @Override
            public boolean onKey(View v, int keyCode, KeyEvent event) {
                if( keyCode == KeyEvent.KEYCODE_BACK )
                {
                    mLogs.info("SendMessageFragment. Back button pressed. Finishing activity");
                    getActivity().finish();
                    return true;
                }
                return false;
            }
        });
        return rootView;
    }
    /**
     * Refreshes the SOS button label and lazily creates a server-side event
     * when none exists yet (only when authenticated against the server).
     */
    @Override
    public void onResume() {
        super.onResume();
        mLogs.info("SendMessageFragment.onResume())");
        if (EventManager.getInstance(mActivity).isSosStarted()) {
            mLogs.info("SendMessageFragment.onResume() SOS active");
            mSosBtn.setText(getString(R.string.sos_cancel));
        } else {
            mLogs.info("SendMessageFragment.onResume() SOS not active");
            mSosBtn.setText(getString(R.string.sos));
        }
        // getEvent() apparently throws when no current event exists; in that
        // case an event is created asynchronously below.
        try { EventManager.getInstance(mActivity).getEvent(); }
        catch (Exception e) {
            if (Utils.isServerAuthenticated(mActivity)){
                mLogs.info("SendMessageFragment.onResume() No event, trying to create one");
                Utils.setLoading(mActivity, true);
                NetworkManager.createEvent(mActivity,
                        new NetworkManager.DeliverResultRunnable<Event>() {
                            @Override
                            public void onError(int errorCode) {
                                super.onError(errorCode);
                                Utils.setLoading(mActivity, false);
                            }
                            @Override
                            public void deliver(Event event) {
                                //sometimes event is null :\
                                if (event != null)
                                {
                                    mLogs.info("SendMessageFragment.onResume() event created: " +
                                            event.getId()); // TODO: make event.toString()
                                    EventManager.getInstance(mActivity).setEvent(event);
                                }
                                Utils.setLoading(mActivity, false);
                            }
                        });
            }
        }
    }
    // builds dialog with password prompt; on OK (or IME "done") the entered
    // password is checked and, when correct, the active SOS is cancelled
    private void askPasswordAndCancelSos() {
        mLogs.info("SendMessageFragment. Showing password dialog");
        LayoutInflater li = LayoutInflater.from(mActivity);
        View promptsView = li.inflate(R.layout.password_dialog, null);
        AlertDialog.Builder alertDialogBuilder = new AlertDialog.Builder(mActivity);
        alertDialogBuilder.setView(promptsView);
        final EditText userInput = (EditText) promptsView.findViewById(R.id.pd_password_edit);
        alertDialogBuilder
                .setCancelable(false)
                .setPositiveButton(android.R.string.ok,
                        new DialogInterface.OnClickListener() {
                            public void onClick(DialogInterface dialog, int id) {
                                mLogs.info("SendMessageFragment. Password entered. Checking");
                                checkPasswordAndCancelSos(userInput.getText().toString());
                            }
                        })
                .setNegativeButton(android.R.string.cancel,
                        new DialogInterface.OnClickListener() {
                            public void onClick(DialogInterface dialog, int id) {
                                mLogs.info("SendMessageFragment. Password dialog canceled");
                                dialog.cancel();
                            }
                        });
        final AlertDialog alertDialog = alertDialogBuilder.create();
        alertDialog.show();
        // Pop the soft keyboard immediately so the user can type the password.
        alertDialog.getWindow().setSoftInputMode (WindowManager.LayoutParams.SOFT_INPUT_STATE_ALWAYS_VISIBLE);
        // IME "done" behaves like pressing the OK button.
        userInput.setOnEditorActionListener(new TextView.OnEditorActionListener() {
            @Override
            public boolean onEditorAction(TextView v, int actionId, KeyEvent event) {
                if (actionId == EditorInfo.IME_ACTION_DONE) {
                    mLogs.info("SendMessageFragment. Password entered. Checking");
                    checkPasswordAndCancelSos(userInput.getText().toString());
                    alertDialog.dismiss();
                }
                return true;
            }
        });
    }
    // cancels sos or builds dialog with retry/cancel buttons
    private void checkPasswordAndCancelSos(String password) {
        if (password.equals(Prefs.getPassword(mActivity))) {
            mLogs.info("SendMessageFragment. Password correct. Stoping SOS");
            Utils.setLoading(mActivity, true);
            EventManager.getInstance(mActivity).stopSos();
            mSosBtn.setText(getString(R.string.sos));
        }
        else {
            mLogs.info("SendMessageFragment. Password incorrect");
            AlertDialog.Builder builder = new AlertDialog.Builder(mActivity);
            builder.setTitle(R.string.wrong_password);
            builder.setNegativeButton(android.R.string.cancel, null);
            builder.setPositiveButton(R.string.retry, new DialogInterface.OnClickListener() {
                @Override
                public void onClick(DialogInterface dialog, int which) {
                    mLogs.info("SendMessageFragment. User wants to enter password again");
                    askPasswordAndCancelSos();
                }
            });
            builder.create().show();
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.net.ftp;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
import org.apache.commons.net.util.Charsets;
/**
* This class handles the entire process of parsing a listing of
* file entries from the server.
* <p>
* This object defines a two-part parsing mechanism.
* <p>
* The first part is comprised of reading the raw input into an internal
* list of strings. Every item in this list corresponds to an actual
* file. All extraneous matter emitted by the server will have been
* removed by the end of this phase. This is accomplished in conjunction
* with the FTPFileEntryParser associated with this engine, by calling
* its methods <code>readNextEntry()</code> - which handles the issue of
* what delimits one entry from another, usually but not always a line
* feed and <code>preParse()</code> - which handles removal of
* extraneous matter such as the preliminary lines of a listing, removal
* of duplicates on versioning systems, etc.
* <p>
* The second part is composed of the actual parsing, again in conjunction
* with the particular parser used by this engine. This is controlled
* by an iterator over the internal list of strings. This may be done
* either in block mode, by calling the <code>getNext()</code> and
* <code>getPrevious()</code> methods to provide "paged" output of less
* than the whole list at one time, or by calling the
* <code>getFiles()</code> method to return the entire list.
* <p>
* Examples:
* <p>
* Paged access:
* <pre>
* FTPClient f=FTPClient();
* f.connect(server);
* f.login(username, password);
* FTPListParseEngine engine = f.initiateListParsing(directory);
*
* while (engine.hasNext()) {
* FTPFile[] files = engine.getNext(25); // "page size" you want
* //do whatever you want with these files, display them, etc.
* //expensive FTPFile objects not created until needed.
* }
* </pre>
* <p>
* For unpaged access, simply use FTPClient.listFiles(). That method
* uses this class transparently.
*/
public class FTPListParseEngine {
    /**
     * An empty immutable {@code FTPFile} array.
     */
    private static final FTPFile[] EMPTY_FTP_FILE_ARRAY = new FTPFile[0];

    /** Raw (unparsed) entry strings, one per file, populated by {@link #readServerList}. */
    private List<String> entries = new LinkedList<>();

    /** Cursor backing the paged accessors {@link #getNext} / {@link #getPrevious}. */
    private ListIterator<String> internalIterator = entries.listIterator();

    /** Parser used to split and interpret the raw server listing. */
    private final FTPFileEntryParser parser;

    // Should invalid files (parse failures) be allowed?
    private final boolean saveUnparseableEntries;

    public FTPListParseEngine(final FTPFileEntryParser parser) {
        this(parser, null);
    }

    /**
     * Intended for use by FTPClient only.
     *
     * @param parser        the entry parser to use
     * @param configuration optional client configuration; when non-null, its
     *                      unparseable-entries flag controls whether parse
     *                      failures are kept as placeholder entries
     * @since 3.4
     */
    FTPListParseEngine(final FTPFileEntryParser parser, final FTPClientConfig configuration) {
        this.parser = parser;
        this.saveUnparseableEntries =
                configuration != null && configuration.getUnparseableEntries();
    }

    /**
     * Parses one raw listing line. When parsing fails and unparseable entries
     * are enabled, a placeholder {@code FTPFile} wrapping the raw line is
     * returned; otherwise the result is {@code null} for unparseable input.
     * Shared by {@link #getFiles(FTPFileFilter)}, {@link #getNext(int)} and
     * {@link #getPrevious(int)} so the logic cannot drift between them.
     *
     * @param entry the raw listing line
     * @return the parsed file, a placeholder, or {@code null}
     */
    private FTPFile parseEntry(final String entry) {
        FTPFile file = this.parser.parseFTPEntry(entry);
        if (file == null && saveUnparseableEntries) {
            file = new FTPFile(entry);
        }
        return file;
    }

    /**
     * Returns an array of FTPFile objects containing the whole list of
     * files returned by the server as read by this object's parser.
     * None of the entries will be null.
     *
     * @return the whole parsed listing with null (unparseable) entries filtered out
     * @throws IOException - not ever thrown, may be removed in a later release
     */
    public FTPFile[] getFiles()
    throws IOException // TODO remove; not actually thrown
    {
        return getFiles(FTPFileFilters.NON_NULL);
    }

    /**
     * Returns an array of FTPFile objects containing the whole list of
     * files returned by the server as read by this object's parser.
     * The files are filtered before being added to the array.
     *
     * @param filter FTPFileFilter, must not be <code>null</code>.
     *
     * @return the parsed files accepted by the filter.
     * <p><b>
     * NOTE:</b> This array may contain null members if any of the
     * individual file listings failed to parse. The caller should
     * check each entry for null before referencing it, or use a
     * filter such as {@link FTPFileFilters#NON_NULL} which does not
     * allow null entries.
     * @since 2.2
     * @throws IOException - not ever thrown, may be removed in a later release
     */
    public FTPFile[] getFiles(final FTPFileFilter filter)
    throws IOException // TODO remove; not actually thrown
    {
        final List<FTPFile> tmpResults = new ArrayList<>(entries.size());
        for (final String entry : entries) {
            final FTPFile file = parseEntry(entry);
            if (filter.accept(file)) {
                tmpResults.add(file);
            }
        }
        return tmpResults.toArray(EMPTY_FTP_FILE_ARRAY);
    }

    /**
     * Returns an array of at most <code>quantityRequested</code> FTPFile
     * objects starting at this object's internal iterator's current position.
     * If fewer than <code>quantityRequested</code> such elements are
     * available, the returned array will have a length equal to the number
     * of entries at and after the current position. If no such entries are
     * found, this array will have a length of 0.
     *
     * After this method is called this object's internal iterator is advanced
     * by a number of positions equal to the size of the array returned.
     *
     * @param quantityRequested the maximum number of entries we want to get.
     *
     * @return the next "page" of parsed files, in listing order.
     * <p><b>
     * NOTE:</b> This array may contain null members if any of the
     * individual file listings failed to parse. The caller should
     * check each entry for null before referencing it.
     */
    public FTPFile[] getNext(final int quantityRequested) {
        final List<FTPFile> tmpResults = new LinkedList<>();
        for (int count = quantityRequested;
                count > 0 && this.internalIterator.hasNext(); count--) {
            tmpResults.add(parseEntry(this.internalIterator.next()));
        }
        return tmpResults.toArray(EMPTY_FTP_FILE_ARRAY);
    }

    /**
     * Returns an array of at most <code>quantityRequested</code> FTPFile
     * objects starting at this object's internal iterator's current position,
     * and working back toward the beginning.
     *
     * If fewer than <code>quantityRequested</code> such elements are
     * available, the returned array will have a length equal to the number
     * of entries before the current position. If no such entries are found,
     * this array will have a length of 0.
     *
     * After this method is called this object's internal iterator is moved
     * back by a number of positions equal to the size of the array returned.
     *
     * @param quantityRequested the maximum number of entries we want to get.
     *
     * @return the previous "page" of parsed files. This array will be in the
     * same order as the underlying list (not reversed).
     * <p><b>
     * NOTE:</b> This array may contain null members if any of the
     * individual file listings failed to parse. The caller should
     * check each entry for null before referencing it.
     */
    public FTPFile[] getPrevious(final int quantityRequested) {
        final List<FTPFile> tmpResults = new LinkedList<>();
        for (int count = quantityRequested;
                count > 0 && this.internalIterator.hasPrevious(); count--) {
            // prepend so the page stays in listing order
            tmpResults.add(0, parseEntry(this.internalIterator.previous()));
        }
        return tmpResults.toArray(EMPTY_FTP_FILE_ARRAY);
    }

    /**
     * Convenience method to allow clients to know whether this object's
     * internal iterator's current position is at the end of the list.
     *
     * @return true if internal iterator is not at end of list, false otherwise.
     */
    public boolean hasNext() {
        return internalIterator.hasNext();
    }

    /**
     * Convenience method to allow clients to know whether this object's
     * internal iterator's current position is at the beginning of the list.
     *
     * @return true if internal iterator is not at beginning of list, false otherwise.
     */
    public boolean hasPrevious() {
        return internalIterator.hasPrevious();
    }

    /**
     * Internal method for reading (and closing) the input into the
     * <code>entries</code> list. After this method has completed,
     * <code>entries</code> will contain a collection of entries (as defined by
     * <code>FTPFileEntryParser.readNextEntry()</code>), but this may contain
     * various non-entry preliminary lines from the server output, duplicates,
     * and other data that will not be part of the final listing.
     *
     * @param inputStream The socket stream on which the input will be read.
     * @param charsetName The encoding to use.
     *
     * @throws IOException thrown on any failure to read the stream
     */
    private void read(final InputStream inputStream, final String charsetName) throws IOException {
        try (final BufferedReader reader = new BufferedReader(
                new InputStreamReader(inputStream, Charsets.toCharset(charsetName)))) {
            String line = this.parser.readNextEntry(reader);
            while (line != null) {
                this.entries.add(line);
                line = this.parser.readNextEntry(reader);
            }
        }
    }

    /**
     * Do not use.
     * @param inputStream the stream from which to read
     * @throws IOException on error
     * @deprecated use {@link #readServerList(InputStream, String)} instead
     */
    @Deprecated
    public void readServerList(final InputStream inputStream) throws IOException {
        readServerList(inputStream, null);
    }

    /**
     * Reads (and closes) the initial reading and preparsing of the list
     * returned by the server. After this method has completed, this object
     * will contain a list of unparsed entries (Strings) each referring to a
     * unique file on the server.
     *
     * @param inputStream input stream provided by the server socket.
     * @param charsetName the encoding to be used for reading the stream
     *
     * @throws IOException thrown on any failure to read from the server.
     */
    public void readServerList(final InputStream inputStream, final String charsetName) throws IOException {
        this.entries = new LinkedList<>();
        read(inputStream, charsetName);
        this.parser.preParse(this.entries);
        resetIterator();
    }

    /**
     * Resets this object's internal iterator to the beginning of the list.
     */
    public void resetIterator() {
        this.internalIterator = this.entries.listIterator();
    }
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.execution.junit2.configuration;
import com.intellij.application.options.ModulesComboBox;
import com.intellij.execution.ExecutionBundle;
import com.intellij.execution.MethodBrowser;
import com.intellij.execution.configuration.BrowseModuleValueActionListener;
import com.intellij.execution.junit.JUnitConfiguration;
import com.intellij.execution.junit.JUnitConfigurationType;
import com.intellij.execution.junit.JUnitUtil;
import com.intellij.execution.junit.TestClassFilter;
import com.intellij.execution.testframework.SourceScope;
import com.intellij.execution.testframework.TestSearchScope;
import com.intellij.execution.ui.*;
import com.intellij.icons.AllIcons;
import com.intellij.ide.util.ClassFilter;
import com.intellij.ide.util.PackageChooserDialog;
import com.intellij.openapi.fileChooser.FileChooser;
import com.intellij.openapi.fileChooser.FileChooserDescriptor;
import com.intellij.openapi.fileChooser.FileChooserDescriptorFactory;
import com.intellij.openapi.fileChooser.FileChooserFactory;
import com.intellij.openapi.fileTypes.PlainTextLanguage;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.options.SettingsEditor;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.*;
import com.intellij.openapi.ui.ex.MessagesEx;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.vcs.changes.ChangeListManager;
import com.intellij.openapi.vcs.changes.LocalChangeList;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.rt.execution.junit.RepeatCount;
import com.intellij.ui.*;
import com.intellij.ui.components.JBLabel;
import com.intellij.util.IconUtil;
import com.intellij.util.ui.UIUtil;
import gnu.trove.TIntArrayList;
import org.jetbrains.annotations.NotNull;
import javax.swing.*;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import javax.swing.text.Document;
import javax.swing.text.PlainDocument;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.Arrays;
import java.util.List;
/**
 * Settings editor (the "Configuration" tab) for JUnit run configurations.
 *
 * <p>Lets the user choose what to run — all tests in a package, a directory, a class
 * pattern, a single class, a single method, a category, or (when test discovery is
 * enabled) tests derived from a source position / VCS change list — plus the module
 * search scope, alternative JRE, fork mode and repeat mode. {@link #resetEditorFrom}
 * populates the controls from a {@link JUnitConfiguration}; {@link #applyEditorTo}
 * writes them back.
 */
public class JUnitConfigurable<T extends JUnitConfiguration> extends SettingsEditor<T> implements PanelWithAnchor {
  // Indexed by the JUnitConfigurationModel test-kind constant; each entry lists the
  // indices into myTestLocations that must stay enabled while that kind is selected.
  private static final List<TIntArrayList> ourEnabledFields = Arrays.asList(
    new TIntArrayList(new int[]{0}),
    new TIntArrayList(new int[]{1}),
    new TIntArrayList(new int[]{1, 2}),
    new TIntArrayList(new int[]{3}),
    new TIntArrayList(new int[]{4}),
    new TIntArrayList(new int[]{5}),
    new TIntArrayList(new int[]{1, 2}),
    new TIntArrayList(new int[]{6})
  );
  private static final String[] FORK_MODE_ALL =
    {JUnitConfiguration.FORK_NONE, JUnitConfiguration.FORK_METHOD, JUnitConfiguration.FORK_KLASS};
  private static final String[] FORK_MODE = {JUnitConfiguration.FORK_NONE, JUnitConfiguration.FORK_METHOD};
  private final ConfigurationModuleSelector myModuleSelector;
  // Slots 0..5 correspond to ALL_IN_PACKAGE, CLASS, METHOD, DIR, CATEGORY, PATTERN
  // (see the assignments in the constructor). Discovery-based kinds have no location field.
  private final LabeledComponent[] myTestLocations = new LabeledComponent[6];
  private final JUnitConfigurationModel myModel;
  // One browse-button handler per test-location slot, in slot order.
  private final BrowseModuleValueActionListener[] myBrowsers;
  private JComponent myPackagePanel;
  private LabeledComponent<EditorTextFieldWithBrowseButton> myPackage;
  private LabeledComponent<TextFieldWithBrowseButton> myDir;
  private LabeledComponent<JPanel> myPattern;
  private LabeledComponent<EditorTextFieldWithBrowseButton> myClass;
  private LabeledComponent<EditorTextFieldWithBrowseButton> myMethod;
  private LabeledComponent<EditorTextFieldWithBrowseButton> myCategory;
  // Fields
  private JPanel myWholePanel;
  private LabeledComponent<ModulesComboBox> myModule;
  private CommonJavaParametersPanel myCommonJavaParameters;
  private JRadioButton myWholeProjectScope;
  private JRadioButton mySingleModuleScope;
  private JRadioButton myModuleWDScope;
  private TextFieldWithBrowseButton myPatternTextField;
  private JrePathEditor myJrePathEditor;
  private JComboBox myForkCb;
  private JBLabel myTestLabel;
  private JComboBox myTypeChooser;
  private JBLabel mySearchForTestsLabel;
  private JPanel myScopesPanel;
  private JComboBox myRepeatCb;
  private JTextField myRepeatCountField;
  private LabeledComponent<JComboBox<String>> myChangeListLabeledComponent;
  private Project myProject;
  private JComponent anchor;

  /**
   * Builds the editor and wires all listeners. Note: the form fields annotated above are
   * bound by the GUI designer before this runs; {@link #createUIComponents()} supplies the
   * custom-constructed ones.
   */
  public JUnitConfigurable(final Project project) {
    myProject = project;
    myModel = new JUnitConfigurationModel(project);
    myModuleSelector = new ConfigurationModuleSelector(project, getModulesComponent());
    myJrePathEditor.setDefaultJreSelector(DefaultJreSelector.fromModuleDependencies(getModulesComponent(), false));
    myCommonJavaParameters.setModuleContext(myModuleSelector.getModule());
    myCommonJavaParameters.setHasModuleMacro();
    // Keep the common-parameters panel in sync with whichever module is selected.
    myModule.getComponent().addActionListener(new ActionListener() {
      public void actionPerformed(ActionEvent e) {
        myCommonJavaParameters.setModuleContext(myModuleSelector.getModule());
      }
    });
    // Browse handlers, one per test-location slot (package, class, method, pattern,
    // directory, category) — order must match the myTestLocations slot assignments below.
    myBrowsers = new BrowseModuleValueActionListener[]{
      new PackageChooserActionListener(project),
      new TestClassBrowser(project),
      new MethodBrowser(project) {
        protected Condition<PsiMethod> getFilter(PsiClass testClass) {
          return new JUnitUtil.TestMethodFilter(testClass);
        }

        @Override
        protected String getClassName() {
          return JUnitConfigurable.this.getClassName();
        }

        @Override
        protected ConfigurationModuleSelector getModuleSelector() {
          return myModuleSelector;
        }
      },
      new TestsChooserActionListener(project),
      new BrowseModuleValueActionListener(project) {
        @Override
        protected String showDialog() {
          // Directory chooser for the "all in directory" kind.
          final VirtualFile virtualFile =
            FileChooser.chooseFile(FileChooserDescriptorFactory.createSingleFolderDescriptor(), project, null);
          if (virtualFile != null) {
            return FileUtil.toSystemDependentName(virtualFile.getPath());
          }
          return null;
        }
      },
      new CategoryBrowser(project)
    };
    // Populate the test-kind chooser; discovery-based kinds only when the registry flag is on.
    final DefaultComboBoxModel aModel = new DefaultComboBoxModel();
    aModel.addElement(JUnitConfigurationModel.ALL_IN_PACKAGE);
    aModel.addElement(JUnitConfigurationModel.DIR);
    aModel.addElement(JUnitConfigurationModel.PATTERN);
    aModel.addElement(JUnitConfigurationModel.CLASS);
    aModel.addElement(JUnitConfigurationModel.METHOD);
    aModel.addElement(JUnitConfigurationModel.CATEGORY);
    if (Registry.is("testDiscovery.enabled")) {
      aModel.addElement(JUnitConfigurationModel.BY_SOURCE_POSITION);
      aModel.addElement(JUnitConfigurationModel.BY_SOURCE_CHANGES);
    }
    myTypeChooser.setModel(aModel);
    myTypeChooser.setRenderer(new ListCellRendererWrapper<Integer>() {
      @Override
      public void customize(JList list, Integer value, int index, boolean selected, boolean hasFocus) {
        // FIX: Swing may invoke a renderer with a null value (e.g. empty selection);
        // switching on a null Integer would throw an NPE on unboxing.
        if (value == null) return;
        switch (value) {
          case JUnitConfigurationModel.ALL_IN_PACKAGE:
            setText("All in package");
            break;
          case JUnitConfigurationModel.DIR:
            setText("All in directory");
            break;
          case JUnitConfigurationModel.PATTERN:
            setText("Pattern");
            break;
          case JUnitConfigurationModel.CLASS:
            setText("Class");
            break;
          case JUnitConfigurationModel.METHOD:
            setText("Method");
            break;
          case JUnitConfigurationModel.CATEGORY:
            setText("Category");
            break;
          case JUnitConfigurationModel.BY_SOURCE_POSITION:
            setText("Through source location");
            break;
          case JUnitConfigurationModel.BY_SOURCE_CHANGES:
            setText("Over changes in sources");
            break;
        }
      }
    });
    myTestLocations[JUnitConfigurationModel.ALL_IN_PACKAGE] = myPackage;
    myTestLocations[JUnitConfigurationModel.CLASS] = myClass;
    myTestLocations[JUnitConfigurationModel.METHOD] = myMethod;
    myTestLocations[JUnitConfigurationModel.DIR] = myDir;
    myTestLocations[JUnitConfigurationModel.CATEGORY] = myCategory;
    myRepeatCb.setModel(new DefaultComboBoxModel(RepeatCount.REPEAT_TYPES));
    myRepeatCb.setSelectedItem(RepeatCount.ONCE);
    // The explicit repeat count is only editable for the "N times" repeat mode.
    myRepeatCb.addActionListener(new ActionListener() {
      @Override
      public void actionPerformed(ActionEvent e) {
        myRepeatCountField.setEnabled(RepeatCount.N.equals(myRepeatCb.getSelectedItem()));
      }
    });
    // The "pattern" location is a composite: a text field plus an edit-in-popup button.
    final JPanel panel = myPattern.getComponent();
    panel.setLayout(new BorderLayout());
    myPatternTextField = new TextFieldWithBrowseButton();
    myPatternTextField.setButtonIcon(IconUtil.getAddIcon());
    panel.add(myPatternTextField, BorderLayout.CENTER);
    final FixedSizeButton editBtn = new FixedSizeButton();
    editBtn.setIcon(AllIcons.Actions.ShowViewer);
    editBtn.addActionListener(new ActionListener() {
      public void actionPerformed(ActionEvent e) {
        Messages.showTextAreaDialog(myPatternTextField.getTextField(), "Configure suite tests", "EditParametersPopupWindow");
      }
    });
    panel.add(editBtn, BorderLayout.EAST);
    myTestLocations[JUnitConfigurationModel.PATTERN] = myPattern;
    // Path completion and $PATH$-style insertion for the directory field.
    final FileChooserDescriptor dirFileChooser = FileChooserDescriptorFactory.createSingleFolderDescriptor();
    dirFileChooser.setHideIgnored(false);
    final JTextField textField = myDir.getComponent().getTextField();
    InsertPathAction.addTo(textField, dirFileChooser);
    FileChooserFactory.getInstance().installFileCompletion(textField, dirFileChooser, true, null);
    // Done
    myModel.setListener(this);
    myTypeChooser.addActionListener(new ActionListener() {
      @Override
      public void actionPerformed(ActionEvent e) {
        final Object selectedItem = myTypeChooser.getSelectedItem();
        myModel.setType((Integer)selectedItem);
        changePanel();
      }
    }
    );
    // For CLASS runs the available fork modes depend on the repeat mode
    // (repeating a class allows forking per class as well).
    myRepeatCb.addActionListener(new ActionListener() {
      @Override
      public void actionPerformed(ActionEvent e) {
        if ((Integer) myTypeChooser.getSelectedItem() == JUnitConfigurationModel.CLASS) {
          myForkCb.setModel(getForkModelBasedOnRepeat());
        }
      }
    });
    myModel.setType(JUnitConfigurationModel.CLASS);
    installDocuments();
    addRadioButtonsListeners(new JRadioButton[]{myWholeProjectScope, mySingleModuleScope, myModuleWDScope}, null);
    myWholeProjectScope.addChangeListener(new ChangeListener() {
      public void stateChanged(final ChangeEvent e) {
        onScopeChanged();
      }
    });
    UIUtil.setEnabled(myCommonJavaParameters.getProgramParametersComponent(), false, true);

    setAnchor(mySearchForTestsLabel);
    myJrePathEditor.setAnchor(myModule.getLabel());
    myCommonJavaParameters.setAnchor(myModule.getLabel());

    // Change-list chooser for the "over changes in sources" discovery kind.
    final DefaultComboBoxModel<String> model = new DefaultComboBoxModel<>();
    myChangeListLabeledComponent.getComponent().setModel(model);
    model.addElement("All");

    final List<LocalChangeList> changeLists = ChangeListManager.getInstance(project).getChangeLists();
    for (LocalChangeList changeList : changeLists) {
      model.addElement(changeList.getName());
    }
  }

  /** Groups the scope radio buttons and selects the first one if nothing is selected yet. */
  private static void addRadioButtonsListeners(final JRadioButton[] radioButtons, ChangeListener listener) {
    final ButtonGroup group = new ButtonGroup();
    for (final JRadioButton radioButton : radioButtons) {
      // Note: a null listener is silently ignored by EventListenerList.
      radioButton.getModel().addChangeListener(listener);
      group.add(radioButton);
    }
    if (group.getSelection() == null) group.setSelected(radioButtons[0].getModel(), true);
  }

  /** Writes the current UI state into {@code configuration}. Invalid repeat counts fall back to 1. */
  public void applyEditorTo(@NotNull final JUnitConfiguration configuration) {
    configuration.setRepeatMode((String)myRepeatCb.getSelectedItem());
    try {
      configuration.setRepeatCount(Integer.parseInt(myRepeatCountField.getText()));
    }
    catch (NumberFormatException e) {
      // Non-numeric user input: run once rather than failing the apply.
      configuration.setRepeatCount(1);
    }
    myModel.apply(getModuleSelector().getModule(), configuration);
    configuration.getPersistentData().setChangeList((String)myChangeListLabeledComponent.getComponent().getSelectedItem());
    applyHelpersTo(configuration);
    final JUnitConfiguration.Data data = configuration.getPersistentData();
    if (myWholeProjectScope.isSelected()) {
      data.setScope(TestSearchScope.WHOLE_PROJECT);
    }
    else if (mySingleModuleScope.isSelected()) {
      data.setScope(TestSearchScope.SINGLE_MODULE);
    }
    else if (myModuleWDScope.isSelected()) {
      data.setScope(TestSearchScope.MODULE_WITH_DEPENDENCIES);
    }
    configuration.setAlternativeJrePath(myJrePathEditor.getJrePathOrName());
    configuration.setAlternativeJrePathEnabled(myJrePathEditor.isAlternativeJreSelected());
    myCommonJavaParameters.applyTo(configuration);
    configuration.setForkMode((String)myForkCb.getSelectedItem());
  }

  /** Populates the UI controls from {@code configuration}. */
  public void resetEditorFrom(@NotNull final JUnitConfiguration configuration) {
    final int count = configuration.getRepeatCount();
    myRepeatCountField.setText(String.valueOf(count));
    myRepeatCountField.setEnabled(count > 1);
    myRepeatCb.setSelectedItem(configuration.getRepeatMode());

    myModel.reset(configuration);
    myChangeListLabeledComponent.getComponent().setSelectedItem(configuration.getPersistentData().getChangeList());
    myCommonJavaParameters.reset(configuration);
    getModuleSelector().reset(configuration);
    final TestSearchScope scope = configuration.getPersistentData().getScope();
    if (scope == TestSearchScope.SINGLE_MODULE) {
      mySingleModuleScope.setSelected(true);
    }
    else if (scope == TestSearchScope.MODULE_WITH_DEPENDENCIES) {
      myModuleWDScope.setSelected(true);
    }
    else {
      myWholeProjectScope.setSelected(true);
    }
    myJrePathEditor
      .setPathOrName(configuration.getAlternativeJrePath(), configuration.isAlternativeJrePathEnabled());
    myForkCb.setSelectedItem(configuration.getForkMode());
  }

  /**
   * Shows/hides the location fields and adjusts the fork-mode combo according to the
   * currently selected test kind. The previously selected fork mode is preserved where
   * the new kind still allows it.
   */
  private void changePanel () {
    String selectedItem = (String)myForkCb.getSelectedItem();
    if (selectedItem == null) {
      selectedItem = JUnitConfiguration.FORK_NONE;
    }
    final Integer selectedType = (Integer)myTypeChooser.getSelectedItem();
    if (selectedType == JUnitConfigurationModel.ALL_IN_PACKAGE) {
      myPackagePanel.setVisible(true);
      myScopesPanel.setVisible(true);
      myPattern.setVisible(false);
      myClass.setVisible(false);
      myCategory.setVisible(false);
      myMethod.setVisible(false);
      myDir.setVisible(false);
      myChangeListLabeledComponent.setVisible(false);
      myForkCb.setEnabled(true);
      myForkCb.setModel(new DefaultComboBoxModel(FORK_MODE_ALL));
      myForkCb.setSelectedItem(selectedItem);
    } else if (selectedType == JUnitConfigurationModel.DIR) {
      myPackagePanel.setVisible(false);
      myScopesPanel.setVisible(false);
      myDir.setVisible(true);
      myPattern.setVisible(false);
      myClass.setVisible(false);
      myCategory.setVisible(false);
      myChangeListLabeledComponent.setVisible(false);
      myMethod.setVisible(false);
      myForkCb.setEnabled(true);
      myForkCb.setModel(new DefaultComboBoxModel(FORK_MODE_ALL));
      myForkCb.setSelectedItem(selectedItem);
    }
    else if (selectedType == JUnitConfigurationModel.CLASS) {
      myPackagePanel.setVisible(false);
      myScopesPanel.setVisible(false);
      myPattern.setVisible(false);
      myDir.setVisible(false);
      myClass.setVisible(true);
      myCategory.setVisible(false);
      myChangeListLabeledComponent.setVisible(false);
      myMethod.setVisible(false);
      myForkCb.setEnabled(true);
      myForkCb.setModel(getForkModelBasedOnRepeat());
      // FIX: compare strings by value, not by reference identity ("!=") — the previous
      // selection may be an equal-but-distinct String (e.g. after deserialization).
      // Forking per class makes no sense when a single class is run, so fall back to per method.
      myForkCb.setSelectedItem(JUnitConfiguration.FORK_KLASS.equals(selectedItem)
                               ? JUnitConfiguration.FORK_METHOD : selectedItem);
    }
    else if (selectedType == JUnitConfigurationModel.METHOD || selectedType == JUnitConfigurationModel.BY_SOURCE_POSITION){
      myPackagePanel.setVisible(false);
      myScopesPanel.setVisible(false);
      myPattern.setVisible(false);
      myDir.setVisible(false);
      myClass.setVisible(true);
      myCategory.setVisible(false);
      myMethod.setVisible(true);
      myChangeListLabeledComponent.setVisible(false);
      // A single method can't be forked at all.
      myForkCb.setEnabled(false);
      myForkCb.setSelectedItem(JUnitConfiguration.FORK_NONE);
    } else if (selectedType == JUnitConfigurationModel.CATEGORY) {
      myPackagePanel.setVisible(false);
      myScopesPanel.setVisible(true);
      myDir.setVisible(false);
      myPattern.setVisible(false);
      myClass.setVisible(false);
      myCategory.setVisible(true);
      myMethod.setVisible(false);
      myChangeListLabeledComponent.setVisible(false);
      myForkCb.setEnabled(true);
      myForkCb.setModel(new DefaultComboBoxModel(FORK_MODE_ALL));
      myForkCb.setSelectedItem(selectedItem);
    }
    else if (selectedType == JUnitConfigurationModel.BY_SOURCE_CHANGES) {
      myPackagePanel.setVisible(false);
      myScopesPanel.setVisible(false);
      myDir.setVisible(false);
      myPattern.setVisible(false);
      myClass.setVisible(false);
      myCategory.setVisible(false);
      myMethod.setVisible(false);
      myChangeListLabeledComponent.setVisible(true);
      myForkCb.setEnabled(true);
      myForkCb.setModel(new DefaultComboBoxModel(FORK_MODE_ALL));
      myForkCb.setSelectedItem(selectedItem);
    }
    else {
      // PATTERN (and any future kinds): pattern field plus method filter.
      myPackagePanel.setVisible(false);
      myScopesPanel.setVisible(true);
      myPattern.setVisible(true);
      myDir.setVisible(false);
      myClass.setVisible(false);
      myCategory.setVisible(false);
      myMethod.setVisible(true);
      myChangeListLabeledComponent.setVisible(false);
      myForkCb.setEnabled(true);
      myForkCb.setModel(new DefaultComboBoxModel(FORK_MODE_ALL));
      myForkCb.setSelectedItem(selectedItem);
    }
  }

  /** Fork modes for a CLASS run: forking per class is only meaningful when repeating. */
  private DefaultComboBoxModel getForkModelBasedOnRepeat() {
    return new DefaultComboBoxModel(RepeatCount.ONCE.equals(myRepeatCb.getSelectedItem()) ? FORK_MODE : FORK_MODE_ALL);
  }

  public ModulesComboBox getModulesComponent() {
    return myModule.getComponent();
  }

  public ConfigurationModuleSelector getModuleSelector() {
    return myModuleSelector;
  }

  /**
   * Connects each test-location field to its browse handler and registers the field's
   * document with the model so edits propagate into the configuration data.
   */
  private void installDocuments() {
    for (int i = 0; i < myTestLocations.length; i++) {
      final LabeledComponent testLocation = getTestLocation(i);
      final JComponent component = testLocation.getComponent();
      final ComponentWithBrowseButton field;
      Object document;
      if (component instanceof TextFieldWithBrowseButton) {
        final TextFieldWithBrowseButton textField = (TextFieldWithBrowseButton)component;
        document = new PlainDocument();
        textField.getTextField().setDocument((Document)document);
        field = textField;
      } else if (component instanceof EditorTextFieldWithBrowseButton) {
        field = (ComponentWithBrowseButton)component;
        document = ((EditorTextField)field.getChildComponent()).getDocument();
      }
      else {
        // The pattern slot wraps its text field in a JPanel; use the stored field directly.
        field = myPatternTextField;
        document = new PlainDocument();
        ((TextFieldWithBrowseButton)field).getTextField().setDocument((Document)document);
      }
      myBrowsers[i].setField(field);
      if (myBrowsers[i] instanceof MethodBrowser) {
        final EditorTextField childComponent = (EditorTextField)field.getChildComponent();
        ((MethodBrowser)myBrowsers[i]).installCompletion(childComponent);
        document = childComponent.getDocument();
      }
      myModel.setJUnitDocument(i, document);
    }
  }

  public LabeledComponent getTestLocation(final int index) {
    return myTestLocations[index];
  }

  /**
   * Called by the GUI designer to instantiate the components that need non-default
   * constructors (editor text fields with class/method completion).
   */
  private void createUIComponents() {
    myPackage = new LabeledComponent<>();
    myPackage.setComponent(new EditorTextFieldWithBrowseButton(myProject, false));

    myClass = new LabeledComponent<>();
    final TestClassBrowser classBrowser = new TestClassBrowser(myProject);
    myClass.setComponent(new EditorTextFieldWithBrowseButton(myProject, true, new JavaCodeFragment.VisibilityChecker() {
      @Override
      public Visibility isDeclarationVisible(PsiElement declaration, PsiElement place) {
        try {
          // Offer classes accepted by the test filter, or any resolvable class when
          // completing a nested position.
          if (declaration instanceof PsiClass && (classBrowser.getFilter().isAccepted(((PsiClass)declaration)) || classBrowser.findClass(((PsiClass)declaration).getQualifiedName()) != null && place.getParent() != null)) {
            return Visibility.VISIBLE;
          }
        }
        catch (ClassBrowser.NoFilterException e) {
          return Visibility.NOT_VISIBLE;
        }
        return Visibility.NOT_VISIBLE;
      }
    }));

    myCategory = new LabeledComponent<>();
    myCategory.setComponent(new EditorTextFieldWithBrowseButton(myProject, true, new JavaCodeFragment.VisibilityChecker() {
      @Override
      public Visibility isDeclarationVisible(PsiElement declaration, PsiElement place) {
        if (declaration instanceof PsiClass) {
          return Visibility.VISIBLE;
        }
        return Visibility.NOT_VISIBLE;
      }
    }));

    myMethod = new LabeledComponent<>();
    final EditorTextFieldWithBrowseButton textFieldWithBrowseButton = new EditorTextFieldWithBrowseButton(myProject, true,
                                                                                                          JavaCodeFragment.VisibilityChecker.EVERYTHING_VISIBLE,
                                                                                                          PlainTextLanguage.INSTANCE.getAssociatedFileType());
    myMethod.setComponent(textFieldWithBrowseButton);
  }

  @Override
  public JComponent getAnchor() {
    return anchor;
  }

  @Override
  public void setAnchor(JComponent anchor) {
    this.anchor = anchor;
    mySearchForTestsLabel.setAnchor(anchor);
    myTestLabel.setAnchor(anchor);
    myClass.setAnchor(anchor);
    myDir.setAnchor(anchor);
    myMethod.setAnchor(anchor);
    myPattern.setAnchor(anchor);
    myPackage.setAnchor(anchor);
    myCategory.setAnchor(anchor);
    myChangeListLabeledComponent.setAnchor(anchor);
  }

  /** Model callback: enables/disables location fields for the newly selected test kind. */
  public void onTypeChanged(final int newType) {
    myTypeChooser.setSelectedItem(newType);
    final TIntArrayList enabledFields = ourEnabledFields.get(newType);
    for (int i = 0; i < myTestLocations.length; i++)
      getTestLocation(i).setEnabled(enabledFields.contains(i));
    /*if (newType == JUnitConfigurationModel.PATTERN) {
      myModule.setEnabled(false);
    } else */if (newType != JUnitConfigurationModel.ALL_IN_PACKAGE &&
                 newType != JUnitConfigurationModel.PATTERN &&
                 newType != JUnitConfigurationModel.CATEGORY) {
      myModule.setEnabled(true);
    }
    else {
      // Scope-driven kinds: module enablement depends on the selected search scope.
      onScopeChanged();
    }
  }

  /** Disables the module chooser when a whole-project scope makes it meaningless. */
  private void onScopeChanged() {
    final Integer selectedItem = (Integer)myTypeChooser.getSelectedItem();
    final boolean allInPackageAllInProject = (selectedItem == JUnitConfigurationModel.ALL_IN_PACKAGE ||
                                              selectedItem == JUnitConfigurationModel.PATTERN ||
                                              selectedItem == JUnitConfigurationModel.CATEGORY) && myWholeProjectScope.isSelected();
    myModule.setEnabled(!allInPackageAllInProject);
    if (allInPackageAllInProject) {
      myModule.getComponent().setSelectedItem(null);
    }
  }

  private String getClassName() {
    return ((LabeledComponent<EditorTextFieldWithBrowseButton>)getTestLocation(JUnitConfigurationModel.CLASS)).getComponent().getText();
  }

  private void setPackage(final PsiPackage aPackage) {
    if (aPackage == null) return;
    ((LabeledComponent<EditorTextFieldWithBrowseButton>)getTestLocation(JUnitConfigurationModel.ALL_IN_PACKAGE)).getComponent()
      .setText(aPackage.getQualifiedName());
  }

  @NotNull
  public JComponent createEditor() {
    return myWholePanel;
  }

  private void applyHelpersTo(final JUnitConfiguration currentState) {
    myCommonJavaParameters.applyTo(currentState);
    getModuleSelector().applyTo(currentState);
  }

  /** Browse handler for the "all in package" field: a package chooser dialog. */
  private static class PackageChooserActionListener extends BrowseModuleValueActionListener {
    public PackageChooserActionListener(final Project project) {
      super(project);
    }

    protected String showDialog() {
      final PackageChooserDialog dialog = new PackageChooserDialog(ExecutionBundle.message("choose.package.dialog.title"), getProject());
      dialog.show();
      final PsiPackage aPackage = dialog.getSelectedPackage();
      return aPackage != null ? aPackage.getQualifiedName() : null;
    }
  }

  /** Browse handler for the pattern field: appends chosen classes with "||" separators. */
  private class TestsChooserActionListener extends TestClassBrowser {
    public TestsChooserActionListener(final Project project) {
      super(project);
    }

    @Override
    protected void onClassChoosen(PsiClass psiClass) {
      final JTextField textField = myPatternTextField.getTextField();
      final String text = textField.getText();
      textField.setText(text + (text.length() > 0 ? "||" : "") + psiClass.getQualifiedName());
    }

    @Override
    protected ClassFilter.ClassFilterWithScope getFilter() throws NoFilterException {
      try {
        return TestClassFilter.create(SourceScope.wholeProject(getProject()), null);
      }
      catch (JUnitUtil.NoJUnitException ignore) {
        throw new NoFilterException(new MessagesEx.MessageInfo(getProject(),
                                                               ignore.getMessage(),
                                                               ExecutionBundle.message("cannot.browse.test.inheritors.dialog.title")));
      }
    }

    @Override
    public void actionPerformed(ActionEvent e) {
      showDialog();
    }
  }

  /** Browse handler for the class field: restricted to JUnit test classes in scope. */
  private class TestClassBrowser extends ClassBrowser {
    public TestClassBrowser(final Project project) {
      super(project, ExecutionBundle.message("choose.test.class.dialog.title"));
    }

    protected void onClassChoosen(final PsiClass psiClass) {
      setPackage(JUnitUtil.getContainingPackage(psiClass));
    }

    protected PsiClass findClass(final String className) {
      return getModuleSelector().findClass(className);
    }

    protected ClassFilter.ClassFilterWithScope getFilter() throws NoFilterException {
      final ConfigurationModuleSelector moduleSelector = getModuleSelector();
      final Module module = moduleSelector.getModule();
      if (module == null) {
        throw NoFilterException.moduleDoesntExist(moduleSelector);
      }
      final ClassFilter.ClassFilterWithScope classFilter;
      try {
        // Build the filter against a throwaway configuration snapshot of the editor state.
        final JUnitConfiguration configurationCopy =
          new JUnitConfiguration(ExecutionBundle.message("default.junit.configuration.name"), getProject(),
                                 JUnitConfigurationType.getInstance().getConfigurationFactories()[0]);
        applyEditorTo(configurationCopy);
        classFilter = TestClassFilter
          .create(SourceScope.modulesWithDependencies(configurationCopy.getModules()), configurationCopy.getConfigurationModule().getModule());
      }
      catch (JUnitUtil.NoJUnitException e) {
        throw NoFilterException.noJUnitInModule(module);
      }
      return classFilter;
    }
  }

  /** Browse handler for the category field: any class in the relevant scope qualifies. */
  private class CategoryBrowser extends ClassBrowser {
    public CategoryBrowser(Project project) {
      super(project, "Category Interface");
    }

    protected PsiClass findClass(final String className) {
      return myModuleSelector.findClass(className);
    }

    protected ClassFilter.ClassFilterWithScope getFilter() throws NoFilterException {
      final Module module = myModuleSelector.getModule();
      final GlobalSearchScope scope;
      if (module == null) {
        scope = GlobalSearchScope.allScope(myProject);
      }
      else {
        scope = GlobalSearchScope.moduleWithDependenciesAndLibrariesScope(module);
      }
      return new ClassFilter.ClassFilterWithScope() {
        public GlobalSearchScope getScope() {
          return scope;
        }

        public boolean isAccepted(final PsiClass aClass) {
          return true;
        }
      };
    }

    @Override
    protected void onClassChoosen(PsiClass psiClass) {
      ((LabeledComponent<EditorTextFieldWithBrowseButton>)getTestLocation(JUnitConfigurationModel.CATEGORY)).getComponent()
        .setText(psiClass.getQualifiedName());
    }
  }
}
| |
/*
* Copyright 2019 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.google.zetasql;
import static com.google.common.truth.Truth.assertThat;
import static com.google.common.truth.Truth.assertWithMessage;
import static com.google.zetasql.TypeTestBase.checkSerializable;
import static com.google.zetasql.TypeTestBase.checkTypeSerializationAndDeserialization;
import static com.google.zetasql.TypeTestBase.checkTypeSerializationAndDeserializationExistingPools;
import static com.google.zetasql.TypeTestBase.getDescriptorPoolWithTypeProtoAndTypeKind;
import com.google.common.collect.Lists;
import com.google.common.testing.EqualsTester;
import com.google.protobuf.DescriptorProtos.FieldDescriptorProto;
import com.google.protobuf.DescriptorProtos.FieldOptions;
import com.google.protobuf.DescriptorProtos.FileDescriptorSet;
import com.google.protobuf.Descriptors.FieldDescriptor;
import com.google.zetasql.ZetaSQLType.ProtoTypeProto;
import com.google.zetasql.ZetaSQLType.TypeKind;
import com.google.zetasql.ZetaSQLType.TypeProto;
import com.google.zetasql.TypeAnnotationProto.FieldFormat;
import com.google.zetasqltest.TestSchemaProto.FieldFormatsProto;
import java.util.ArrayList;
import java.util.List;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
@RunWith(JUnit4.class)
public class ProtoTypeTest {
@Test
public void testSerializationAndDeserialization() {
TypeFactory factory = TypeFactory.nonUniqueNames();
checkTypeSerializationAndDeserialization(factory.createProtoType(TypeProto.class));
ZetaSQLDescriptorPool pool = getDescriptorPoolWithTypeProtoAndTypeKind();
ProtoType type = factory.createProtoType(pool.findMessageTypeByName("zetasql.TypeProto"));
checkTypeSerializationAndDeserialization(type);
List<DescriptorPool> pools = Lists.newArrayList(pool);
checkTypeSerializationAndDeserializationExistingPools(type, pools);
ProtoType type2 =
factory.createProtoType(pool.findMessageTypeByName("zetasql.ArrayTypeProto"));
checkTypeSerializationAndDeserialization(type2);
ProtoType type3 =
factory.createProtoType(pool.findMessageTypeByName("zetasql.ProtoTypeProto"));
checkTypeSerializationAndDeserialization(type3);
}
@Test
public void testSerializeationAndDeserializationMultipleTypesWithSharedPools() {
ZetaSQLDescriptorPool pool = getDescriptorPoolWithTypeProtoAndTypeKind();
ZetaSQLDescriptorPool pool2 = getDescriptorPoolWithTypeProtoAndTypeKind();
TypeFactory factory = TypeFactory.nonUniqueNames();
List<Type> types = new ArrayList<>();
// some proto
types.add(factory.createProtoType(pool.findMessageTypeByName("zetasql.StructTypeProto")));
// another proto
types.add(factory.createProtoType(pool.findMessageTypeByName("zetasql.EnumTypeProto")));
// duplicated proto from different pool
types.add(factory.createProtoType(pool2.findMessageTypeByName("zetasql.EnumTypeProto")));
// duplicated proto from same pool
types.add(factory.createProtoType(pool2.findMessageTypeByName("zetasql.EnumTypeProto")));
// and an enum
types.add(factory.createEnumType(pool.findEnumTypeByName("zetasql.TypeKind")));
// add some simple types
types.add(TypeFactory.createSimpleType(TypeKind.TYPE_BOOL));
types.add(TypeFactory.createSimpleType(TypeKind.TYPE_DOUBLE));
FileDescriptorSetsBuilder fileDescriptorSetsBuilder = new FileDescriptorSetsBuilder();
List<TypeProto> protos = new ArrayList<>();
for (Type type : types) {
TypeProto.Builder builder = TypeProto.newBuilder();
type.serialize(builder, fileDescriptorSetsBuilder);
protos.add(builder.build());
}
List<FileDescriptorSet> sets = fileDescriptorSetsBuilder.build();
// total number of FileDescriptorSet serialized:
// matches the number of DescriptorPools used above.
assertThat(sets).hasSize(2);
List<DescriptorPool> pools = new ArrayList<>();
for (FileDescriptorSet fileDescriptorSet : sets) {
pool = new ZetaSQLDescriptorPool();
pool.importFileDescriptorSet(fileDescriptorSet);
pools.add(pool);
}
assertThat(protos).hasSize(types.size());
for (TypeProto proto : protos) {
// type protos are not self-contained
assertThat(proto.getFileDescriptorSetCount()).isEqualTo(0);
// but can be deserialized with existing pools
Type type = factory.deserialize(proto, pools);
checkTypeSerializationAndDeserialization(type);
}
}
@Test
public void testSerializable() {
TypeFactory factory = TypeFactory.nonUniqueNames();
checkTypeSerializationAndDeserialization(factory.createProtoType(TypeProto.class));
ZetaSQLDescriptorPool pool = getDescriptorPoolWithTypeProtoAndTypeKind();
ProtoType type = factory.createProtoType(pool.findMessageTypeByName("zetasql.TypeProto"));
checkSerializable(type);
ProtoType type2 =
factory.createProtoType(pool.findMessageTypeByName("zetasql.ArrayTypeProto"));
checkSerializable(type2);
ProtoType type3 =
factory.createProtoType(pool.findMessageTypeByName("zetasql.ProtoTypeProto"));
checkSerializable(type3);
}
@Test
public void testSerializeableMultipleTypesWithSharedPools() {
ZetaSQLDescriptorPool pool = getDescriptorPoolWithTypeProtoAndTypeKind();
ZetaSQLDescriptorPool pool2 = getDescriptorPoolWithTypeProtoAndTypeKind();
TypeFactory factory = TypeFactory.nonUniqueNames();
List<Type> types = new ArrayList<>();
// some proto
types.add(factory.createProtoType(pool.findMessageTypeByName("zetasql.StructTypeProto")));
// another proto
types.add(factory.createProtoType(pool.findMessageTypeByName("zetasql.EnumTypeProto")));
// duplicated proto from different pool
types.add(factory.createProtoType(pool2.findMessageTypeByName("zetasql.EnumTypeProto")));
// duplicated proto from same pool
types.add(factory.createProtoType(pool2.findMessageTypeByName("zetasql.EnumTypeProto")));
// and an enum
types.add(factory.createEnumType(pool.findEnumTypeByName("zetasql.TypeKind")));
// add some simple types
types.add(TypeFactory.createSimpleType(TypeKind.TYPE_BOOL));
types.add(TypeFactory.createSimpleType(TypeKind.TYPE_DOUBLE));
FileDescriptorSetsBuilder fileDescriptorSetsBuilder = new FileDescriptorSetsBuilder();
List<TypeProto> protos = new ArrayList<>();
for (Type type : types) {
TypeProto.Builder builder = TypeProto.newBuilder();
type.serialize(builder, fileDescriptorSetsBuilder);
protos.add(builder.build());
}
List<FileDescriptorSet> sets = fileDescriptorSetsBuilder.build();
// total number of FileDescriptorSet serialized:
// matches the number of DescriptorPools used above.
assertThat(sets).hasSize(2);
List<DescriptorPool> pools = new ArrayList<>();
for (FileDescriptorSet fileDescriptorSet : sets) {
pool = new ZetaSQLDescriptorPool();
pool.importFileDescriptorSet(fileDescriptorSet);
pools.add(pool);
}
assertThat(protos).hasSize(types.size());
for (TypeProto proto : protos) {
// type protos are not self-contained
assertThat(proto.getFileDescriptorSetCount()).isEqualTo(0);
// but can be deserialized with existing pools
Type type = factory.deserialize(proto, pools);
checkSerializable(type);
}
}
@Test
public void testEquivalent() {
TypeFactory factory = TypeFactory.nonUniqueNames();
ProtoType proto1 = factory.createProtoType(TypeProto.class);
ProtoType proto2 =
factory.createProtoType(
getDescriptorPoolWithTypeProtoAndTypeKind()
.findMessageTypeByName("zetasql.TypeProto"));
ProtoType proto3 = factory.createProtoType(FieldDescriptorProto.class);
assertThat(proto1.equivalent(proto1)).isTrue();
assertThat(proto1.equivalent(proto2)).isTrue();
assertThat(proto1.equivalent(proto3)).isFalse();
assertThat(proto2.equivalent(proto1)).isTrue();
assertThat(proto2.equivalent(proto2)).isTrue();
assertThat(proto2.equivalent(proto3)).isFalse();
assertThat(proto3.equivalent(proto1)).isFalse();
assertThat(proto3.equivalent(proto2)).isFalse();
assertThat(proto3.equivalent(proto3)).isTrue();
assertThat(proto1.equivalent(TypeFactory.createSimpleType(TypeKind.TYPE_BOOL))).isFalse();
}
private void verifyFormatAnnotation(FieldFormat.Format format, String fieldName) {
FieldDescriptor field = FieldFormatsProto.getDescriptor().findFieldByName(fieldName);
assertThat(ProtoType.getFormatAnnotation(field)).isEqualTo(format);
assertThat(ProtoType.hasFormatAnnotation(field))
.isEqualTo(format != FieldFormat.Format.DEFAULT_FORMAT);
}
@Test
public void testFormatAnnotations() {
verifyFormatAnnotation(FieldFormat.Format.DEFAULT_FORMAT, "no_annotation");
verifyFormatAnnotation(FieldFormat.Format.DATE, "date");
verifyFormatAnnotation(FieldFormat.Format.DATE, "date_64");
verifyFormatAnnotation(FieldFormat.Format.DATE_DECIMAL, "date_decimal");
verifyFormatAnnotation(FieldFormat.Format.DATE_DECIMAL, "date_decimal_64");
verifyFormatAnnotation(FieldFormat.Format.DATE_DECIMAL, "date_decimal_encoding");
verifyFormatAnnotation(FieldFormat.Format.TIMESTAMP_SECONDS, "seconds");
verifyFormatAnnotation(FieldFormat.Format.TIMESTAMP_MILLIS, "millis");
verifyFormatAnnotation(FieldFormat.Format.TIMESTAMP_MICROS, "micros");
verifyFormatAnnotation(FieldFormat.Format.TIMESTAMP_SECONDS, "seconds_format");
verifyFormatAnnotation(FieldFormat.Format.TIMESTAMP_MILLIS, "millis_format");
verifyFormatAnnotation(FieldFormat.Format.TIMESTAMP_MICROS, "micros_format");
verifyFormatAnnotation(FieldFormat.Format.TIMESTAMP_MICROS, "micros_u64");
verifyFormatAnnotation(FieldFormat.Format.DATE, "repeated_date");
verifyFormatAnnotation(FieldFormat.Format.DATE_DECIMAL, "repeated_date_decimal");
verifyFormatAnnotation(FieldFormat.Format.TIMESTAMP_SECONDS, "repeated_seconds");
verifyFormatAnnotation(FieldFormat.Format.TIMESTAMP_MILLIS, "repeated_millis");
verifyFormatAnnotation(FieldFormat.Format.TIMESTAMP_MICROS, "repeated_micros");
verifyFormatAnnotation(FieldFormat.Format.TIMESTAMP_SECONDS, "repeated_seconds_format");
verifyFormatAnnotation(FieldFormat.Format.TIMESTAMP_MILLIS, "repeated_millis_format");
verifyFormatAnnotation(FieldFormat.Format.TIMESTAMP_MICROS, "repeated_micros_format");
}
@Test
public void testEquals() {
TypeFactory factory = TypeFactory.nonUniqueNames();
ProtoType proto1 = factory.createProtoType(TypeProto.class);
ProtoType proto2 =
factory.createProtoType(
getDescriptorPoolWithTypeProtoAndTypeKind()
.findMessageTypeByName("zetasql.TypeProto"));
ProtoType proto3 = factory.createProtoType(FieldDescriptorProto.class);
new EqualsTester().addEqualityGroup(proto1).testEquals();
assertThat(proto1.equals(proto2)).isFalse();
assertThat(proto1.equals(proto3)).isFalse();
assertThat(proto2.equals(proto1)).isFalse();
new EqualsTester().addEqualityGroup(proto2).testEquals();
assertThat(proto2.equals(proto3)).isFalse();
assertThat(proto3.equals(proto1)).isFalse();
assertThat(proto3.equals(proto2)).isFalse();
new EqualsTester().addEqualityGroup(proto3).testEquals();
assertThat(proto1.equals(TypeFactory.createSimpleType(TypeKind.TYPE_BOOL))).isFalse();
}
@Test
public void testAsProto() {
TypeFactory factory = TypeFactory.nonUniqueNames();
ArrayType array =
TypeFactory.createArrayType(TypeFactory.createSimpleType(TypeKind.TYPE_INT32));
EnumType enumType = factory.createEnumType(TypeKind.class);
ProtoType proto = factory.createProtoType(TypeProto.class);
List<StructType.StructField> fields = new ArrayList<>();
fields.add(new StructType.StructField("", TypeFactory.createSimpleType(TypeKind.TYPE_STRING)));
fields.add(new StructType.StructField("a", TypeFactory.createSimpleType(TypeKind.TYPE_INT32)));
StructType struct = TypeFactory.createStructType(fields);
assertThat(proto.asProto()).isEqualTo(proto);
assertThat(array.asProto()).isNull();
assertThat(enumType.asProto()).isNull();
assertThat(struct.asProto()).isNull();
assertThat(TypeFactory.createSimpleType(TypeKind.TYPE_INT32).asProto()).isNull();
}
  @Test
  public void testClassAndProtoSize() {
    // Change-detector test: ProtoType's serialized form (ProtoTypeProto) and its
    // Java field count are pinned so that anyone adding a field is reminded to
    // update the serialization code in both places.
    assertWithMessage(
            "The number of fields of ProtoTypeProto has changed, "
                + "please also update the serialization code accordingly.")
        .that(ProtoTypeProto.getDescriptor().getFields())
        .hasSize(4);
    assertWithMessage(
            "The number of fields in ProtoType class has changed, "
                + "please also update the proto and serialization code accordingly.")
        .that(TestUtil.getNonStaticFieldCount(ProtoType.class))
        .isEqualTo(2);
  }
}
| |
// Licensed to the Software Freedom Conservancy (SFC) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The SFC licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.openqa.grid.internal.utils.configuration;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonObject;
import com.google.gson.TypeAdapter;
import com.google.gson.annotations.Expose;
import com.google.gson.stream.JsonReader;
import com.google.gson.stream.JsonWriter;
import com.beust.jcommander.Parameter;
import org.openqa.grid.common.exception.GridConfigurationException;
import org.openqa.grid.internal.listeners.Prioritizer;
import org.openqa.grid.internal.utils.CapabilityMatcher;
import org.openqa.grid.internal.utils.DefaultCapabilityMatcher;
import org.openqa.grid.internal.utils.configuration.converters.StringToClassConverter;
import org.openqa.grid.internal.utils.configuration.validators.FileExistsValueValidator;
import java.io.IOException;
/**
 * Configuration for a Selenium Grid hub: command-line parameters (via JCommander
 * annotations) and JSON (de)serialization (via Gson, {@code @Expose}-only fields).
 */
public class GridHubConfiguration extends GridConfiguration {
  public static final String DEFAULT_HUB_CONFIG_FILE = "defaults/DefaultHub.json";

  /*
   * IMPORTANT - Keep these constant values in sync with the ones specified in
   * 'defaults/DefaultHub.json' -- if for no other reasons documentation & consistency.
   */

  /**
   * Default hub role
   */
  static final String DEFAULT_ROLE = "hub";

  /**
   * Default hub port
   */
  static final Integer DEFAULT_PORT = 4444;

  /**
   * Default hub cleanup cycle
   */
  static final Integer DEFAULT_CLEANUP_CYCLE = 5000;

  /**
   * Default hub new session wait timeout. Non-positive means wait indefinitely.
   */
  static final Integer DEFAULT_NEW_SESSION_WAIT_TIMEOUT = -1;

  /**
   * Default hub throw on capability not present toggle
   */
  static final Boolean DEFAULT_THROW_ON_CAPABILITY_NOT_PRESENT_TOGGLE = true;

  /**
   * Default hub GridRegistry implementation to use
   */
  static final String DEFAULT_HUB_REGISTRY_CLASS = "org.openqa.grid.internal.DefaultGridRegistry";

  /*
   * config parameters which do not serialize or de-serialize
   */

  /**
   * Hub specific json config file to use. Defaults to {@code null}.
   */
  @Parameter(
    names = "-hubConfig",
    description = "<String> filename: a JSON file (following grid2 format), which defines the hub properties",
    validateValueWith = FileExistsValueValidator.class
  )
  public String hubConfig;

  /*
   * config parameters which serialize and deserialize to/from json
   */

  /**
   * Capability matcher to use. Defaults to {@link DefaultCapabilityMatcher}
   */
  @Expose
  @Parameter(
    names = { "-matcher", "-capabilityMatcher" },
    description = "<String> class name : a class implementing the CapabilityMatcher interface. Specifies the logic the hub will follow to define whether a request can be assigned to a node. For example, if you want to have the matching process use regular expressions instead of exact match when specifying browser version. ALL nodes of a grid ecosystem would then use the same capabilityMatcher, as defined here.",
    converter = StringToClassConverter.CapabilityMatcherStringConverter.class
  )
  public CapabilityMatcher capabilityMatcher = new DefaultCapabilityMatcher();

  /**
   * Timeout for new session requests. Defaults to unlimited.
   */
  @Expose
  @Parameter(
    names = "-newSessionWaitTimeout",
    description = "<Integer> in ms : The time after which a new test waiting for a node to become available will time out. When that happens, the test will throw an exception before attempting to start a browser. An unspecified, zero, or negative value means wait indefinitely."
  )
  public Integer newSessionWaitTimeout = DEFAULT_NEW_SESSION_WAIT_TIMEOUT;

  /**
   * Prioritizer for new honoring session requests based on some priority. Defaults to {@code null}.
   */
  @Expose
  @Parameter(
    names = "-prioritizer",
    description = "<String> class name : a class implementing the Prioritizer interface. Specify a custom Prioritizer if you want to sort the order in which new session requests are processed when there is a queue. Default to null ( no priority = FIFO )",
    converter = StringToClassConverter.PrioritizerStringConverter.class
  )
  public Prioritizer prioritizer;

  /**
   * Whether to throw an Exception when there are no capabilities available that match the request. Defaults to {@code true}.
   */
  @Expose
  @Parameter(
    names = "-throwOnCapabilityNotPresent",
    description = "<Boolean> true or false : If true, the hub will reject all test requests if no compatible proxy is currently registered. If set to false, the request will queue until a node supporting the capability is registered with the grid.",
    arity = 1
  )
  public Boolean throwOnCapabilityNotPresent = DEFAULT_THROW_ON_CAPABILITY_NOT_PRESENT_TOGGLE;

  /**
   * Fully qualified class name of the GridRegistry implementation the hub will use.
   */
  @Expose
  @Parameter(
    names = "-registry",
    description = "<String> class name : a class implementing the GridRegistry interface. Specifies the registry the hub will use."
  )
  public String registry = DEFAULT_HUB_REGISTRY_CLASS;

  /**
   * Creates a new configuration using the default values.
   */
  public GridHubConfiguration() {
    // overrides values set by base classes
    role = DEFAULT_ROLE;
    port = DEFAULT_PORT;
    cleanUpCycle = DEFAULT_CLEANUP_CYCLE;
  }

  /**
   * @param filePath hub config json file to load configuration from
   */
  public static GridHubConfiguration loadFromJSON(String filePath) {
    return loadFromJSON(loadJSONFromResourceOrFile(filePath));
  }

  /**
   * @param json JsonObject to load configuration from
   */
  public static GridHubConfiguration loadFromJSON(JsonObject json) {
    try {
      GsonBuilder builder = new GsonBuilder();
      GridHubConfiguration.staticAddJsonTypeAdapter(builder);
      return builder.excludeFieldsWithoutExposeAnnotation().create()
          .fromJson(json, GridHubConfiguration.class);
    } catch (Throwable e) {
      // Deliberately broad: any parse/reflection failure is surfaced as a
      // configuration error at this boundary, with the cause preserved.
      throw new GridConfigurationException("Error with the JSON of the config : " + e.getMessage(),
                                           e);
    }
  }

  /**
   * Merge this configuration with the specified {@link GridNodeConfiguration}.
   * Only the common (base-class) values are merged.
   * @param other the configuration to merge from; may be {@code null}
   */
  public void merge(GridNodeConfiguration other) {
    super.merge(other);
  }

  /**
   * Merge this configuration with the specified {@link GridHubConfiguration}.
   * Values from {@code other} win whenever {@code isMergeAble} says they should.
   * @param other the configuration to merge from; no-op when {@code null}
   */
  public void merge(GridHubConfiguration other) {
    if (other == null) {
      return;
    }
    super.merge(other);

    if (isMergeAble(other.capabilityMatcher, capabilityMatcher)) {
      capabilityMatcher = other.capabilityMatcher;
    }
    if (isMergeAble(other.newSessionWaitTimeout, newSessionWaitTimeout)) {
      newSessionWaitTimeout = other.newSessionWaitTimeout;
    }
    if (isMergeAble(other.prioritizer, prioritizer)) {
      prioritizer = other.prioritizer;
    }
    if (isMergeAble(other.throwOnCapabilityNotPresent, throwOnCapabilityNotPresent)) {
      throwOnCapabilityNotPresent = other.throwOnCapabilityNotPresent;
    }
    if (isMergeAble(other.registry, registry)) {
      registry = other.registry;
    }
  }

  @Override
  public String toString(String format) {
    StringBuilder sb = new StringBuilder();
    sb.append(super.toString(format));
    sb.append(toString(format, "hubConfig", hubConfig));
    sb.append(toString(format, "capabilityMatcher", capabilityMatcher.getClass().getCanonicalName()));
    sb.append(toString(format, "newSessionWaitTimeout", newSessionWaitTimeout));
    sb.append(toString(format, "prioritizer", prioritizer != null ? prioritizer.getClass().getCanonicalName(): null));
    sb.append(toString(format, "throwOnCapabilityNotPresent", throwOnCapabilityNotPresent));
    sb.append(toString(format, "registry", registry));
    return sb.toString();
  }

  @Override
  protected void addJsonTypeAdapter(GsonBuilder builder) {
    super.addJsonTypeAdapter(builder);
    GridHubConfiguration.staticAddJsonTypeAdapter(builder);
  }

  /** Registers the adapters that map interface-typed fields to/from class names in JSON. */
  protected static void staticAddJsonTypeAdapter(GsonBuilder builder) {
    builder.registerTypeAdapter(CapabilityMatcher.class, new CapabilityMatcherAdapter().nullSafe());
    builder.registerTypeAdapter(Prioritizer.class, new PrioritizerAdapter().nullSafe());
  }

  /**
   * Gson adapter that serializes an object as its canonical class name, and
   * deserializes a class name string by instantiating it via its public
   * no-argument constructor. Null handling is delegated to {@code nullSafe()}.
   */
  protected static class SimpleClassNameAdapter<T> extends TypeAdapter<T> {
    @Override
    public void write(JsonWriter out, T value) throws IOException {
      out.value(value.getClass().getCanonicalName());
    }

    @Override
    public T read(JsonReader in) throws IOException {
      String value = in.nextString();
      try {
        // Safe by construction: the adapter is only registered for the type it
        // is parameterized with, so the loaded class must be assignable to T.
        @SuppressWarnings("unchecked")
        T instance = (T) Class.forName(value).newInstance();
        return instance;
      } catch (Exception e) {
        // Bug fix: the message previously interpolated Class.class.getName(),
        // which always printed the useless literal "java.lang.Class".
        throw new RuntimeException(
            String.format("String %s could not be coerced to a class instance", value), e);
      }
    }
  }

  /** Adapter binding {@link SimpleClassNameAdapter} to {@link CapabilityMatcher} fields. */
  protected static class CapabilityMatcherAdapter extends SimpleClassNameAdapter<CapabilityMatcher> {
  }

  /** Adapter binding {@link SimpleClassNameAdapter} to {@link Prioritizer} fields. */
  protected static class PrioritizerAdapter extends SimpleClassNameAdapter<Prioritizer> {
  }
}
| |
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.framework.plugintool.dialog;
import java.awt.*;
import java.util.ArrayList;
import java.util.List;
import javax.swing.*;
import javax.swing.event.HyperlinkEvent.EventType;
import docking.EmptyBorderToggleButton;
import docking.widgets.HyperlinkComponent;
import docking.widgets.checkbox.GCheckBox;
import docking.widgets.label.*;
import ghidra.framework.plugintool.PluginConfigurationModel;
import ghidra.framework.plugintool.PluginTool;
import ghidra.framework.plugintool.util.*;
import ghidra.util.HTMLUtilities;
import ghidra.util.exception.AssertException;
import ghidra.util.layout.HorizontalLayout;
import ghidra.util.layout.VerticalLayout;
import resources.ResourceManager;
/**
 * Scrollable panel that lists every plugin package known to the
 * {@link PluginConfigurationModel}, one row per package, each with a checkbox
 * (load/unload all stable plugins in the package), an icon, a name, a
 * "Configure" hyperlink, and a description.
 */
public class PluginManagerComponent extends JPanel implements Scrollable {
	private final PluginTool tool;
	private PluginConfigurationModel model;
	private List<PluginPackageComponent> packageComponentList = new ArrayList<>();

	PluginManagerComponent(PluginTool tool, PluginConfigurationModel model) {
		super(new VerticalLayout(2));
		setBorder(BorderFactory.createEmptyBorder(10, 10, 10, 10));
		setBackground(Color.WHITE);
		this.tool = tool;
		this.model = model;
		// Keep the per-package checkboxes in sync when the model changes.
		model.setChangeCallback(this::updateCheckboxes);
		List<PluginPackage> pluginPackages = model.getPluginPackages();
		for (PluginPackage pluginPackage : pluginPackages) {
			PluginPackageComponent comp = new PluginPackageComponent(pluginPackage);
			packageComponentList.add(comp);
			add(comp);
		}
	}

	/** Refreshes every package row's checkbox from the current model state. */
	private void updateCheckboxes() {
		for (PluginPackageComponent comp : packageComponentList) {
			comp.updateCheckBoxState();
		}
	}

	/** Shows the plugin-installer dialog scoped to the given package. */
	void managePlugins(PluginPackage pluginPackage) {
		List<PluginDescription> descriptions = model.getPluginDescriptions(pluginPackage);
		PluginInstallerDialog pluginInstallerDialog = new PluginInstallerDialog(
			"Configure " + pluginPackage.getName() + " Plugins", tool, model, descriptions);
		tool.showDialog(pluginInstallerDialog);
	}

	/** Shows the plugin-installer dialog over all known plugins. */
	void manageAllPlugins() {
		PluginInstallerDialog pluginTableDialog = new PluginInstallerDialog("Configure All Plugins",
			tool, model, model.getAllPluginDescriptions());
		tool.showDialog(pluginTableDialog);
	}

	PluginConfigurationModel getModel() {
		return model;
	}

	int getPackageCount() {
		return packageComponentList.size();
	}

	int getPluginCount(PluginPackage pluginPackage) {
		return model.getPluginDescriptions(pluginPackage).size();
	}

	/** Adds all supported plugins of the package when selected; removes all when deselected. */
	void selectPluginPackage(PluginPackage pluginPackage, boolean selected) {
		if (selected) {
			model.addSupportedPlugins(pluginPackage);
		}
		else {
			model.removeAllPlugins(pluginPackage);
		}
	}

	/**
	 * Returns whether the "add all" checkbox for the given package is enabled.
	 * @throws AssertException if no row exists for the package
	 */
	boolean isAddAllCheckBoxEnabled(PluginPackage pluginPackage) {
		for (PluginPackageComponent ppc : packageComponentList) {
			if (ppc.pluginPackage.equals(pluginPackage)) {
				return ppc.checkBox.isEnabled();
			}
		}
		throw new AssertException("No checkbox found for " + pluginPackage);
	}

//==================================================================================================
// Inner Classes
//==================================================================================================

	/** One row of the list: checkbox + icon | name + configure link | description. */
	private class PluginPackageComponent extends JPanel {
		private final Color BG = Color.WHITE;
		private final PluginPackage pluginPackage;
		private final GCheckBox checkBox;

		PluginPackageComponent(PluginPackage pluginPackage) {
			super(new BorderLayout());
			setBackground(BG);
			this.pluginPackage = pluginPackage;
			this.checkBox = new GCheckBox();

			initializeCheckBoxSection();
			initializeLabelSection();
			initializeDescriptionSection();

			setBorder(BorderFactory.createLineBorder(Color.DARK_GRAY));
			updateCheckBoxState();
		}

		private void initializeCheckBoxSection() {
			final JPanel checkboxPanel = new JPanel(new HorizontalLayout(0));
			checkboxPanel.setBackground(BG);
			checkBox.addActionListener(
				e -> selectPluginPackage(pluginPackage, checkBox.isSelected()));
			// A package with only unstable plugins cannot be bulk-enabled here;
			// its plugins must be configured individually.
			if (model.hasOnlyUnstablePlugins(pluginPackage)) {
				checkBox.setEnabled(false);
			}
			checkBox.setBackground(BG);
			checkboxPanel.add(Box.createHorizontalStrut(10));
			checkboxPanel.add(checkBox);
			checkboxPanel.add(Box.createHorizontalStrut(10));
			final JLabel iconLabel =
				new GIconLabel(ResourceManager.getScaledIcon(pluginPackage.getIcon(), 32, 32, 32));
			iconLabel.setBackground(BG);
			checkboxPanel.add(iconLabel);
			checkboxPanel.add(Box.createHorizontalStrut(10));
			checkboxPanel.setPreferredSize(new Dimension(84, 70));
			add(checkboxPanel, BorderLayout.WEST);
		}

		private void initializeLabelSection() {
			final JPanel centerPanel = new JPanel(new GridBagLayout());
			GridBagConstraints gbc = new GridBagConstraints();
			gbc.fill = GridBagConstraints.HORIZONTAL;
			gbc.weightx = 1.0;
			centerPanel.setBackground(BG);
			final JPanel labelPanel = new JPanel(new VerticalLayout(3));
			labelPanel.setBackground(BG);
			final GLabel nameLabel = new GLabel(pluginPackage.getName());
			nameLabel.setFont(nameLabel.getFont().deriveFont(18f));
			nameLabel.setForeground(Color.BLACK);
			labelPanel.add(nameLabel);
			final HyperlinkComponent configureHyperlink = createConfigureHyperlink();
			labelPanel.add(configureHyperlink);
			labelPanel.setBorder(BorderFactory.createEmptyBorder(0, 25, 0, 40));
			centerPanel.add(labelPanel, gbc);
			add(centerPanel);
		}

		private HyperlinkComponent createConfigureHyperlink() {
			final HyperlinkComponent configureHyperlink =
				new HyperlinkComponent("<html> <a href=\"Configure\">Configure</a>");
			configureHyperlink.addHyperlinkListener("Configure", e -> {
				if (e.getEventType() == EventType.ACTIVATED) {
					managePlugins(PluginPackageComponent.this.pluginPackage);
				}
			});
			configureHyperlink.setBackground(BG);
			return configureHyperlink;
		}

		/** Wraps the description in HTML that constrains it to a 300px column. */
		private String enhanceDescription(final String text) {
			return String.format("<html><body style='width: 300px'>%s</body></html>", text);
		}

		private void initializeDescriptionSection() {
			final String htmlDescription = enhanceDescription(pluginPackage.getDescription());
			final JLabel descriptionlabel = new GHtmlLabel(htmlDescription);
			descriptionlabel.setForeground(Color.GRAY);
			descriptionlabel.setBorder(BorderFactory.createEmptyBorder(5, 0, 0, 0));
			descriptionlabel.setVerticalAlignment(SwingConstants.TOP);
			descriptionlabel.setToolTipText(
				HTMLUtilities.toWrappedHTML(pluginPackage.getDescription(), 80));
			add(descriptionlabel, BorderLayout.EAST);
		}

		void updateCheckBoxState() {
			checkBox.setSelected(
				model.getPackageState(pluginPackage) != PluginPackageState.NO_PLUGINS_LOADED);
		}
	}

	/** Toggle button that scales any icon handed to it to 32x32. */
	static class MyToggleButton extends EmptyBorderToggleButton {
		public MyToggleButton(Icon icon) {
			super(icon);
		}

		@Override
		public void setIcon(Icon newIcon) {
			Icon scaledIcon = ResourceManager.getScaledIcon(newIcon, 32, 32, 32);
			doSetIcon(scaledIcon);
		}
	}

	@Override
	public Dimension getPreferredScrollableViewportSize() {
		return getPreferredSize();
	}

	@Override
	public int getScrollableBlockIncrement(Rectangle visibleRect, int orientation, int direction) {
		return 50;
	}

	@Override
	public boolean getScrollableTracksViewportHeight() {
		return false;
	}

	@Override
	public boolean getScrollableTracksViewportWidth() {
		return true;
	}

	@Override
	public int getScrollableUnitIncrement(Rectangle visibleRect, int orientation, int direction) {
		return 20;
	}
}
| |
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* Creative.java
*
* This file was auto-generated from WSDL
* by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter.
*/
package com.google.api.ads.admanager.axis.v202111;
/**
* A {@code Creative} represents the media for the ad being served.
*
* <p>Read more about creatives on the
* <a href="https://support.google.com/dfp_premium/answer/3185155">Ad
* Manager Help Center</a>.</p>
*/
public abstract class Creative implements java.io.Serializable {
    /* The ID of the advertiser that owns the creative. This attribute
     * is required. */
    private java.lang.Long advertiserId;

    /* Uniquely identifies the {@code Creative}. This value is read-only
     * and is assigned by Google when the creative is created. This
     * attribute is required for updates. */
    private java.lang.Long id;

    /* The name of the creative. This attribute is required and has
     * a maximum length of 255 characters. */
    private java.lang.String name;

    /* The {@link Size} of the creative. This attribute is required
     * for creation and then is read-only. */
    private com.google.api.ads.admanager.axis.v202111.Size size;

    /* The URL of the creative for previewing the media. This attribute
     * is read-only and is assigned by Google when a creative is created. */
    private java.lang.String previewUrl;

    /* Set of policy labels detected for this creative.
     * This attribute is read-only. */
    private com.google.api.ads.admanager.axis.v202111.CreativePolicyViolation[] policyLabels;

    /* The set of labels applied to this creative. */
    private com.google.api.ads.admanager.axis.v202111.AppliedLabel[] appliedLabels;

    /* The date and time this creative was last modified. */
    private com.google.api.ads.admanager.axis.v202111.DateTime lastModifiedDateTime;

    /* The values of the custom fields associated with this creative. */
    private com.google.api.ads.admanager.axis.v202111.BaseCustomFieldValue[] customFieldValues;

    /* The third party companies associated with this creative.
     *
     * <p>This is distinct from any associated companies
     * that Google may detect programmatically. */
    private com.google.api.ads.admanager.axis.v202111.ThirdPartyDataDeclaration thirdPartyDataDeclaration;
    /** No-argument constructor required by the Axis (de)serialization framework. */
    public Creative() {
    }

    /** Constructs a fully populated instance; used by the generated Axis bindings. */
    public Creative(
           java.lang.Long advertiserId,
           java.lang.Long id,
           java.lang.String name,
           com.google.api.ads.admanager.axis.v202111.Size size,
           java.lang.String previewUrl,
           com.google.api.ads.admanager.axis.v202111.CreativePolicyViolation[] policyLabels,
           com.google.api.ads.admanager.axis.v202111.AppliedLabel[] appliedLabels,
           com.google.api.ads.admanager.axis.v202111.DateTime lastModifiedDateTime,
           com.google.api.ads.admanager.axis.v202111.BaseCustomFieldValue[] customFieldValues,
           com.google.api.ads.admanager.axis.v202111.ThirdPartyDataDeclaration thirdPartyDataDeclaration) {
           this.advertiserId = advertiserId;
           this.id = id;
           this.name = name;
           this.size = size;
           this.previewUrl = previewUrl;
           this.policyLabels = policyLabels;
           this.appliedLabels = appliedLabels;
           this.lastModifiedDateTime = lastModifiedDateTime;
           this.customFieldValues = customFieldValues;
           this.thirdPartyDataDeclaration = thirdPartyDataDeclaration;
    }
    /**
     * Renders the creative's fields (null-valued fields omitted, keys in
     * alphabetical order) via Guava's MoreObjects helper.
     */
    @Override
    public String toString() {
        return com.google.common.base.MoreObjects.toStringHelper(this.getClass())
            .omitNullValues()
            .add("advertiserId", getAdvertiserId())
            .add("appliedLabels", getAppliedLabels())
            .add("customFieldValues", getCustomFieldValues())
            .add("id", getId())
            .add("lastModifiedDateTime", getLastModifiedDateTime())
            .add("name", getName())
            .add("policyLabels", getPolicyLabels())
            .add("previewUrl", getPreviewUrl())
            .add("size", getSize())
            .add("thirdPartyDataDeclaration", getThirdPartyDataDeclaration())
            .toString();
    }
    /**
     * Gets the advertiserId value for this Creative.
     *
     * @return the ID of the advertiser that owns the creative; required
     */
    public java.lang.Long getAdvertiserId() {
        return advertiserId;
    }

    /**
     * Sets the advertiserId value for this Creative.
     *
     * @param advertiserId the ID of the advertiser that owns the creative; required
     */
    public void setAdvertiserId(java.lang.Long advertiserId) {
        this.advertiserId = advertiserId;
    }

    /**
     * Gets the id value for this Creative.
     *
     * @return the unique identifier of this {@code Creative}; read-only, assigned
     *         by Google at creation time, required for updates
     */
    public java.lang.Long getId() {
        return id;
    }

    /**
     * Sets the id value for this Creative.
     *
     * @param id the unique identifier of this {@code Creative}; read-only, assigned
     *        by Google at creation time, required for updates
     */
    public void setId(java.lang.Long id) {
        this.id = id;
    }

    /**
     * Gets the name value for this Creative.
     *
     * @return the name of the creative; required, at most 255 characters
     */
    public java.lang.String getName() {
        return name;
    }

    /**
     * Sets the name value for this Creative.
     *
     * @param name the name of the creative; required, at most 255 characters
     */
    public void setName(java.lang.String name) {
        this.name = name;
    }

    /**
     * Gets the size value for this Creative.
     *
     * @return the {@link Size} of the creative; required for creation, then read-only
     */
    public com.google.api.ads.admanager.axis.v202111.Size getSize() {
        return size;
    }

    /**
     * Sets the size value for this Creative.
     *
     * @param size the {@link Size} of the creative; required for creation, then read-only
     */
    public void setSize(com.google.api.ads.admanager.axis.v202111.Size size) {
        this.size = size;
    }

    /**
     * Gets the previewUrl value for this Creative.
     *
     * @return the URL for previewing the media; read-only, assigned by Google at creation
     */
    public java.lang.String getPreviewUrl() {
        return previewUrl;
    }

    /**
     * Sets the previewUrl value for this Creative.
     *
     * @param previewUrl the URL for previewing the media; read-only, assigned by Google at creation
     */
    public void setPreviewUrl(java.lang.String previewUrl) {
        this.previewUrl = previewUrl;
    }
    /**
     * Gets the policyLabels value for this Creative.
     *
     * @return the set of policy labels detected for this creative; read-only
     */
    public com.google.api.ads.admanager.axis.v202111.CreativePolicyViolation[] getPolicyLabels() {
        return policyLabels;
    }

    /**
     * Sets the policyLabels value for this Creative.
     *
     * @param policyLabels the set of policy labels detected for this creative; read-only
     */
    public void setPolicyLabels(com.google.api.ads.admanager.axis.v202111.CreativePolicyViolation[] policyLabels) {
        this.policyLabels = policyLabels;
    }

    // Indexed accessor; no null/bounds checking — an NPE or
    // ArrayIndexOutOfBoundsException propagates to the caller.
    public com.google.api.ads.admanager.axis.v202111.CreativePolicyViolation getPolicyLabels(int i) {
        return this.policyLabels[i];
    }

    // Indexed mutator; same caveat as the indexed getter above.
    public void setPolicyLabels(int i, com.google.api.ads.admanager.axis.v202111.CreativePolicyViolation _value) {
        this.policyLabels[i] = _value;
    }

    /**
     * Gets the appliedLabels value for this Creative.
     *
     * @return the set of labels applied to this creative
     */
    public com.google.api.ads.admanager.axis.v202111.AppliedLabel[] getAppliedLabels() {
        return appliedLabels;
    }

    /**
     * Sets the appliedLabels value for this Creative.
     *
     * @param appliedLabels the set of labels applied to this creative
     */
    public void setAppliedLabels(com.google.api.ads.admanager.axis.v202111.AppliedLabel[] appliedLabels) {
        this.appliedLabels = appliedLabels;
    }

    // Indexed accessor; no null/bounds checking.
    public com.google.api.ads.admanager.axis.v202111.AppliedLabel getAppliedLabels(int i) {
        return this.appliedLabels[i];
    }

    // Indexed mutator; no null/bounds checking.
    public void setAppliedLabels(int i, com.google.api.ads.admanager.axis.v202111.AppliedLabel _value) {
        this.appliedLabels[i] = _value;
    }

    /**
     * Gets the lastModifiedDateTime value for this Creative.
     *
     * @return the date and time this creative was last modified
     */
    public com.google.api.ads.admanager.axis.v202111.DateTime getLastModifiedDateTime() {
        return lastModifiedDateTime;
    }

    /**
     * Sets the lastModifiedDateTime value for this Creative.
     *
     * @param lastModifiedDateTime the date and time this creative was last modified
     */
    public void setLastModifiedDateTime(com.google.api.ads.admanager.axis.v202111.DateTime lastModifiedDateTime) {
        this.lastModifiedDateTime = lastModifiedDateTime;
    }

    /**
     * Gets the customFieldValues value for this Creative.
     *
     * @return the values of the custom fields associated with this creative
     */
    public com.google.api.ads.admanager.axis.v202111.BaseCustomFieldValue[] getCustomFieldValues() {
        return customFieldValues;
    }

    /**
     * Sets the customFieldValues value for this Creative.
     *
     * @param customFieldValues the values of the custom fields associated with this creative
     */
    public void setCustomFieldValues(com.google.api.ads.admanager.axis.v202111.BaseCustomFieldValue[] customFieldValues) {
        this.customFieldValues = customFieldValues;
    }

    // Indexed accessor; no null/bounds checking.
    public com.google.api.ads.admanager.axis.v202111.BaseCustomFieldValue getCustomFieldValues(int i) {
        return this.customFieldValues[i];
    }

    // Indexed mutator; no null/bounds checking.
    public void setCustomFieldValues(int i, com.google.api.ads.admanager.axis.v202111.BaseCustomFieldValue _value) {
        this.customFieldValues[i] = _value;
    }

    /**
     * Gets the thirdPartyDataDeclaration value for this Creative.
     *
     * @return the third party companies associated with this creative; distinct
     *         from any associated companies Google may detect programmatically
     */
    public com.google.api.ads.admanager.axis.v202111.ThirdPartyDataDeclaration getThirdPartyDataDeclaration() {
        return thirdPartyDataDeclaration;
    }

    /**
     * Sets the thirdPartyDataDeclaration value for this Creative.
     *
     * @param thirdPartyDataDeclaration the third party companies associated with this
     *        creative; distinct from any associated companies Google may detect
     *        programmatically
     */
    public void setThirdPartyDataDeclaration(com.google.api.ads.admanager.axis.v202111.ThirdPartyDataDeclaration thirdPartyDataDeclaration) {
        this.thirdPartyDataDeclaration = thirdPartyDataDeclaration;
    }
private java.lang.Object __equalsCalc = null;
public synchronized boolean equals(java.lang.Object obj) {
if (!(obj instanceof Creative)) return false;
Creative other = (Creative) obj;
if (obj == null) return false;
if (this == obj) return true;
if (__equalsCalc != null) {
return (__equalsCalc == obj);
}
__equalsCalc = obj;
boolean _equals;
_equals = true &&
((this.advertiserId==null && other.getAdvertiserId()==null) ||
(this.advertiserId!=null &&
this.advertiserId.equals(other.getAdvertiserId()))) &&
((this.id==null && other.getId()==null) ||
(this.id!=null &&
this.id.equals(other.getId()))) &&
((this.name==null && other.getName()==null) ||
(this.name!=null &&
this.name.equals(other.getName()))) &&
((this.size==null && other.getSize()==null) ||
(this.size!=null &&
this.size.equals(other.getSize()))) &&
((this.previewUrl==null && other.getPreviewUrl()==null) ||
(this.previewUrl!=null &&
this.previewUrl.equals(other.getPreviewUrl()))) &&
((this.policyLabels==null && other.getPolicyLabels()==null) ||
(this.policyLabels!=null &&
java.util.Arrays.equals(this.policyLabels, other.getPolicyLabels()))) &&
((this.appliedLabels==null && other.getAppliedLabels()==null) ||
(this.appliedLabels!=null &&
java.util.Arrays.equals(this.appliedLabels, other.getAppliedLabels()))) &&
((this.lastModifiedDateTime==null && other.getLastModifiedDateTime()==null) ||
(this.lastModifiedDateTime!=null &&
this.lastModifiedDateTime.equals(other.getLastModifiedDateTime()))) &&
((this.customFieldValues==null && other.getCustomFieldValues()==null) ||
(this.customFieldValues!=null &&
java.util.Arrays.equals(this.customFieldValues, other.getCustomFieldValues()))) &&
((this.thirdPartyDataDeclaration==null && other.getThirdPartyDataDeclaration()==null) ||
(this.thirdPartyDataDeclaration!=null &&
this.thirdPartyDataDeclaration.equals(other.getThirdPartyDataDeclaration())));
__equalsCalc = null;
return _equals;
}
    // Recursion guard for hashCode() on cyclic object graphs: while a hash
    // computation is in flight, re-entrant calls short-circuit to 0.
    private boolean __hashCodeCalc = false;

    /**
     * Sums the hash codes of all non-null fields; array fields contribute the
     * hash of each non-array element. Consistent with equals() above.
     */
    public synchronized int hashCode() {
        if (__hashCodeCalc) {
            // Re-entrant call caused by a cycle — contribute nothing.
            return 0;
        }
        __hashCodeCalc = true;
        int _hashCode = 1;
        if (getAdvertiserId() != null) {
            _hashCode += getAdvertiserId().hashCode();
        }
        if (getId() != null) {
            _hashCode += getId().hashCode();
        }
        if (getName() != null) {
            _hashCode += getName().hashCode();
        }
        if (getSize() != null) {
            _hashCode += getSize().hashCode();
        }
        if (getPreviewUrl() != null) {
            _hashCode += getPreviewUrl().hashCode();
        }
        if (getPolicyLabels() != null) {
            for (int i=0;
                 i<java.lang.reflect.Array.getLength(getPolicyLabels());
                 i++) {
                java.lang.Object obj = java.lang.reflect.Array.get(getPolicyLabels(), i);
                if (obj != null &&
                    !obj.getClass().isArray()) {
                    _hashCode += obj.hashCode();
                }
            }
        }
        if (getAppliedLabels() != null) {
            for (int i=0;
                 i<java.lang.reflect.Array.getLength(getAppliedLabels());
                 i++) {
                java.lang.Object obj = java.lang.reflect.Array.get(getAppliedLabels(), i);
                if (obj != null &&
                    !obj.getClass().isArray()) {
                    _hashCode += obj.hashCode();
                }
            }
        }
        if (getLastModifiedDateTime() != null) {
            _hashCode += getLastModifiedDateTime().hashCode();
        }
        if (getCustomFieldValues() != null) {
            for (int i=0;
                 i<java.lang.reflect.Array.getLength(getCustomFieldValues());
                 i++) {
                java.lang.Object obj = java.lang.reflect.Array.get(getCustomFieldValues(), i);
                if (obj != null &&
                    !obj.getClass().isArray()) {
                    _hashCode += obj.hashCode();
                }
            }
        }
        if (getThirdPartyDataDeclaration() != null) {
            _hashCode += getThirdPartyDataDeclaration().hashCode();
        }
        __hashCodeCalc = false;
        return _hashCode;
    }
// Type metadata: maps Creative's Java fields onto its XML schema type.
private static org.apache.axis.description.TypeDesc typeDesc =
    new org.apache.axis.description.TypeDesc(Creative.class, true);

// Namespaces shared by every field registration below.
private static final java.lang.String SERVICE_NS =
    "https://www.google.com/apis/ads/publisher/v202111";
private static final java.lang.String XSD_NS =
    "http://www.w3.org/2001/XMLSchema";

static {
    typeDesc.setXmlType(new javax.xml.namespace.QName(SERVICE_NS, "Creative"));
    registerField("advertiserId", XSD_NS, "long", false);
    registerField("id", XSD_NS, "long", false);
    registerField("name", XSD_NS, "string", false);
    registerField("size", SERVICE_NS, "Size", false);
    registerField("previewUrl", XSD_NS, "string", false);
    registerField("policyLabels", SERVICE_NS, "CreativePolicyViolation", true);
    registerField("appliedLabels", SERVICE_NS, "AppliedLabel", true);
    registerField("lastModifiedDateTime", SERVICE_NS, "DateTime", false);
    registerField("customFieldValues", SERVICE_NS, "BaseCustomFieldValue", true);
    registerField("thirdPartyDataDeclaration", SERVICE_NS, "ThirdPartyDataDeclaration", false);
}

/**
 * Registers one element descriptor on {@code typeDesc}. Every field is
 * optional (minOccurs 0), non-nillable, and XML-named after its Java field
 * in the service namespace; array-valued fields additionally set
 * maxOccurs unbounded.
 *
 * @param fieldName      Java bean field name, also the XML local name
 * @param typeNamespace  namespace URI of the field's XML type
 * @param typeLocalName  local name of the field's XML type
 * @param unbounded      true for repeated (array) elements
 */
private static void registerField(java.lang.String fieldName,
                                  java.lang.String typeNamespace,
                                  java.lang.String typeLocalName,
                                  boolean unbounded) {
    org.apache.axis.description.ElementDesc field =
        new org.apache.axis.description.ElementDesc();
    field.setFieldName(fieldName);
    field.setXmlName(new javax.xml.namespace.QName(SERVICE_NS, fieldName));
    field.setXmlType(new javax.xml.namespace.QName(typeNamespace, typeLocalName));
    field.setMinOccurs(0);
    field.setNillable(false);
    if (unbounded) {
        field.setMaxOccursUnbounded(true);
    }
    typeDesc.addFieldDesc(field);
}
/**
 * Return type metadata object
 *
 * @return the shared Axis {@code TypeDesc} describing how Creative maps to
 *         its XML schema type; callers must not mutate it
 */
public static org.apache.axis.description.TypeDesc getTypeDesc() {
    return typeDesc;
}
/**
 * Get Custom Serializer
 *
 * <p>Factory used by the Axis runtime; serializes Creative beans using the
 * static type metadata. The {@code mechType} argument is part of the
 * required factory signature and is not consulted here.
 */
public static org.apache.axis.encoding.Serializer getSerializer(
       java.lang.String mechType,
       java.lang.Class _javaType,
       javax.xml.namespace.QName _xmlType) {
    return new org.apache.axis.encoding.ser.BeanSerializer(_javaType, _xmlType, typeDesc);
}
/**
 * Get Custom Deserializer
 *
 * <p>Factory used by the Axis runtime; deserializes Creative beans using
 * the static type metadata. The {@code mechType} argument is part of the
 * required factory signature and is not consulted here.
 */
public static org.apache.axis.encoding.Deserializer getDeserializer(
       java.lang.String mechType,
       java.lang.Class _javaType,
       javax.xml.namespace.QName _xmlType) {
    return new org.apache.axis.encoding.ser.BeanDeserializer(_javaType, _xmlType, typeDesc);
}
}
| |
/*
* Copyright 1999-2018 Alibaba Group Holding Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.csp.sentinel.slots.statistic.metric;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import com.alibaba.csp.sentinel.slots.statistic.base.WindowWrap;
import com.alibaba.csp.sentinel.slots.statistic.data.MetricBucket;
import com.alibaba.csp.sentinel.util.TimeUtil;
import org.junit.Test;
import static org.junit.Assert.*;
/**
 * Test cases for {@link BucketLeapArray}.
 *
 * <p>These tests exercise the sliding-window ring buffer: bucket creation,
 * window-start alignment, in-place reuse within a window, reset after a
 * full interval, concurrent first-touch of an empty bucket, and listing of
 * non-deprecated buckets.
 *
 * @author Eric Zhao
 */
public class BucketLeapArrayTest {

    // Default geometry: a 2-second interval split into 1-second buckets,
    // i.e. sampleCount = 2. Some tests below shadow these with their own.
    private final int windowLengthInMs = 1000;
    private final int intervalInSec = 2;
    private final int intervalInMs = intervalInSec * 1000;
    private final int sampleCount = intervalInMs / windowLengthInMs;

    @Test
    public void testNewWindow() {
        BucketLeapArray leapArray = new BucketLeapArray(sampleCount, intervalInMs);
        long time = TimeUtil.currentTimeMillis();
        WindowWrap<MetricBucket> window = leapArray.currentWindow(time);

        // A freshly created bucket is aligned to the window boundary and empty.
        assertEquals(window.windowLength(), windowLengthInMs);
        assertEquals(window.windowStart(), time - time % windowLengthInMs);
        assertNotNull(window.value());
        assertEquals(0L, window.value().pass());
    }

    @Test
    public void testLeapArrayWindowStart() {
        BucketLeapArray leapArray = new BucketLeapArray(sampleCount, intervalInMs);
        long firstTime = TimeUtil.currentTimeMillis();
        // windowStart is firstTime truncated down to a bucket boundary.
        long previousWindowStart = firstTime - firstTime % windowLengthInMs;
        WindowWrap<MetricBucket> window = leapArray.currentWindow(firstTime);

        assertEquals(windowLengthInMs, window.windowLength());
        assertEquals(previousWindowStart, window.windowStart());
    }

    @Test
    public void testWindowAfterOneInterval() {
        BucketLeapArray leapArray = new BucketLeapArray(sampleCount, intervalInMs);
        long firstTime = TimeUtil.currentTimeMillis();
        long previousWindowStart = firstTime - firstTime % windowLengthInMs;
        WindowWrap<MetricBucket> window = leapArray.currentWindow(previousWindowStart);

        assertEquals(windowLengthInMs, window.windowLength());
        assertEquals(previousWindowStart, window.windowStart());

        MetricBucket currentWindow = window.value();
        assertNotNull(currentWindow);
        currentWindow.addPass(1);
        currentWindow.addBlock(1);
        assertEquals(1L, currentWindow.pass());
        assertEquals(1L, currentWindow.block());

        // A timestamp in the middle of the same window must resolve to the
        // very same bucket instance, accumulating into the same counters.
        long middleTime = previousWindowStart + windowLengthInMs / 2;
        window = leapArray.currentWindow(middleTime);
        assertEquals(previousWindowStart, window.windowStart());

        MetricBucket middleWindow = window.value();
        middleWindow.addPass(1);
        assertSame(currentWindow, middleWindow);
        assertEquals(2L, middleWindow.pass());
        assertEquals(1L, middleWindow.block());

        // Crossing the window boundary yields the next (empty) bucket.
        long nextTime = middleTime + windowLengthInMs / 2;
        window = leapArray.currentWindow(nextTime);

        assertEquals(windowLengthInMs, window.windowLength());
        assertEquals(windowLengthInMs, window.windowStart() - previousWindowStart);

        currentWindow = window.value();
        assertNotNull(currentWindow);
        assertEquals(0L, currentWindow.pass());
        assertEquals(0L, currentWindow.block());
    }

    // NOTE(review): annotated @Deprecated and deliberately NOT @Test, so
    // JUnit never runs it. Its assertNotSame expectation (a wrapped slot is
    // backed by a *new* WindowWrap after one full interval) appears to
    // describe superseded behavior if deprecated buckets are now reset
    // in place — confirm against LeapArray.currentWindow before ever
    // re-enabling this method.
    @Deprecated
    public void testWindowDeprecatedRefresh() {
        BucketLeapArray leapArray = new BucketLeapArray(sampleCount, intervalInMs);
        final int len = sampleCount;
        long firstTime = TimeUtil.currentTimeMillis();
        List<WindowWrap<MetricBucket>> firstIterWindowList = new ArrayList<WindowWrap<MetricBucket>>(len);

        // First pass: touch every slot of the ring once.
        for (int i = 0; i < len; i++) {
            WindowWrap<MetricBucket> w = leapArray.currentWindow(firstTime + windowLengthInMs * i);
            w.value().addPass(1);
            firstIterWindowList.add(i, w);
        }

        // Second pass, one interval later: the wrap for each slot was
        // expected to be a different instance.
        for (int i = len; i < len * 2; i++) {
            WindowWrap<MetricBucket> w = leapArray.currentWindow(firstTime + windowLengthInMs * i);
            assertNotSame(w, firstIterWindowList.get(i - len));
        }
    }

    @Test
    public void testMultiThreadUpdateEmptyWindow() throws Exception {
        // All threads hit the same (initially absent) bucket concurrently;
        // exactly one creation must win and no increments may be lost.
        final long time = TimeUtil.currentTimeMillis();
        final int nThreads = 16;
        final BucketLeapArray leapArray = new BucketLeapArray(sampleCount, intervalInMs);
        final CountDownLatch latch = new CountDownLatch(nThreads);
        Runnable task = new Runnable() {
            @Override
            public void run() {
                leapArray.currentWindow(time).value().addPass(1);
                latch.countDown();
            }
        };

        for (int i = 0; i < nThreads; i++) {
            new Thread(task).start();
        }
        latch.await();

        assertEquals(nThreads, leapArray.currentWindow(time).value().pass());
    }

    @Test
    public void testGetPreviousWindow() {
        BucketLeapArray leapArray = new BucketLeapArray(sampleCount, intervalInMs);
        long time = TimeUtil.currentTimeMillis();
        WindowWrap<MetricBucket> previousWindow = leapArray.currentWindow(time);
        // No bucket exists one window before the first touch.
        assertNull(leapArray.getPreviousWindow(time));
        long nextTime = time + windowLengthInMs;
        assertSame(previousWindow, leapArray.getPreviousWindow(nextTime));
        // Far in the future the old bucket is deprecated, so no previous window.
        long longTime = time + 11 * windowLengthInMs;
        assertNull(leapArray.getPreviousWindow(longTime));
    }

    @Test
    public void testListWindowsResetOld() throws Exception {
        // Local geometry: 1-second interval of 100 ms buckets (sampleCount 10).
        final int windowLengthInMs = 100;
        final int intervalInMs = 1000;
        final int sampleCount = intervalInMs / windowLengthInMs;

        BucketLeapArray leapArray = new BucketLeapArray(sampleCount, intervalInMs);
        long time = TimeUtil.currentTimeMillis();

        Set<WindowWrap<MetricBucket>> windowWraps = new HashSet<WindowWrap<MetricBucket>>();

        windowWraps.add(leapArray.currentWindow(time));
        windowWraps.add(leapArray.currentWindow(time + windowLengthInMs));

        List<WindowWrap<MetricBucket>> list = leapArray.list();
        for (WindowWrap<MetricBucket> wrap : list) {
            assertTrue(windowWraps.contains(wrap));
        }

        // Sleep past the interval so both earlier buckets become deprecated.
        Thread.sleep(windowLengthInMs + intervalInMs);

        // This will replace the deprecated bucket, so all deprecated buckets will be reset.
        leapArray.currentWindow(time + windowLengthInMs + intervalInMs).value().addPass(1);

        assertEquals(1, leapArray.list().size());
    }

    @Test
    public void testListWindowsNewBucket() throws Exception {
        // Local geometry: 1-second interval of 100 ms buckets (sampleCount 10).
        final int windowLengthInMs = 100;
        final int intervalInSec = 1;
        final int intervalInMs = intervalInSec * 1000;
        final int sampleCount = intervalInMs / windowLengthInMs;

        BucketLeapArray leapArray = new BucketLeapArray(sampleCount, intervalInMs);
        long time = TimeUtil.currentTimeMillis();

        Set<WindowWrap<MetricBucket>> windowWraps = new HashSet<WindowWrap<MetricBucket>>();

        windowWraps.add(leapArray.currentWindow(time));
        windowWraps.add(leapArray.currentWindow(time + windowLengthInMs));

        Thread.sleep(intervalInMs + windowLengthInMs * 3);

        List<WindowWrap<MetricBucket>> list = leapArray.list();
        for (WindowWrap<MetricBucket> wrap : list) {
            assertTrue(windowWraps.contains(wrap));
        }

        // This won't hit deprecated bucket, so no deprecated buckets will be reset.
        // But deprecated buckets can be filtered when collecting list.
        leapArray.currentWindow(TimeUtil.currentTimeMillis()).value().addPass(1);

        assertEquals(1, leapArray.list().size());
    }
}
| |
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.lang.reflect.Method;
import java.util.Map;
import java.util.Optional;
import java.util.OptionalDouble;
import java.util.Random;
import java.util.concurrent.atomic.AtomicInteger;
import com.google.inject.matcher.Matchers;
import org.junit.Test;
import com.github.fge.lambdas.Throwing;
import com.google.common.collect.EvictingQueue;
import com.google.common.collect.Maps;
import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Injector;
public class MethodInterceptorTest {

    /** Marks methods whose invocations should be counted. */
    @Target(ElementType.METHOD)
    @Retention(RetentionPolicy.RUNTIME)
    public static @interface Count {}

    /** Marks methods whose invocations should be timed. */
    @Target(ElementType.METHOD)
    @Retention(RetentionPolicy.RUNTIME)
    public static @interface Timed {}

    /** Marks classes whose methods are eligible for interception. */
    @Target(ElementType.TYPE)
    @Retention(RetentionPolicy.RUNTIME)
    public static @interface Tracked {}

    /** Sample service with annotated and unannotated methods to intercept. */
    @Tracked
    public static class ExampleMethods {

        @Count
        @Timed
        public int intMethod() {
            randomSleep();
            return 1;
        }

        @Count
        @Timed
        public void voidMethod() {
            randomSleep();
        }

        public void unannotatedMethod() {
            randomSleep();
        }
    }

    /**
     * A single counting interceptor bound to @Count methods of @Tracked
     * classes counts each invocation exactly once; counters appear lazily.
     */
    @Test
    public void simpleInterceptor() throws NoSuchMethodException, SecurityException {
        Registry registry = new Registry();
        Injector injector = Guice.createInjector(new AbstractModule() {
            @Override
            protected void configure() {
                bindInterceptor(Matchers.annotatedWith(Tracked.class), Matchers.annotatedWith(Count.class),
                    invocation -> registry.counter(invocation.getMethod()).count(invocation::proceed));
            }
        });

        Method intMethod = ExampleMethods.class.getMethod("intMethod");
        Method voidMethod = ExampleMethods.class.getMethod("voidMethod");
        ExampleMethods example = injector.getInstance(ExampleMethods.class);

        // Nothing has run yet, so no counters exist.
        assertThat(registry.getCounter(intMethod).isPresent(), equalTo(false));
        assertThat(registry.getCounter(voidMethod).isPresent(), equalTo(false));

        example.intMethod();
        example.voidMethod();
        assertThat(registry.getCounter(intMethod).get().getValue(), equalTo(1));
        assertThat(registry.getCounter(voidMethod).get().getValue(), equalTo(1));

        example.voidMethod();
        assertThat(registry.getCounter(intMethod).get().getValue(), equalTo(1));
        assertThat(registry.getCounter(voidMethod).get().getValue(), equalTo(2));
    }

    /**
     * Counting and timing interceptors bound separately both apply to
     * methods carrying both annotations.
     */
    @Test
    public void multipleInterceptors() throws NoSuchMethodException, SecurityException {
        Registry registry = new Registry();
        Injector injector = Guice.createInjector(new AbstractModule() {
            @Override
            protected void configure() {
                bindInterceptor(Matchers.annotatedWith(Tracked.class), Matchers.annotatedWith(Count.class),
                    invocation -> registry.counter(invocation.getMethod()).count(invocation::proceed));
                bindInterceptor(Matchers.annotatedWith(Tracked.class), Matchers.annotatedWith(Timed.class),
                    invocation -> registry.timer(invocation.getMethod()).time(invocation::proceed));
            }
        });

        Method intMethod = ExampleMethods.class.getMethod("intMethod");
        Method voidMethod = ExampleMethods.class.getMethod("voidMethod");
        ExampleMethods example = injector.getInstance(ExampleMethods.class);

        assertThat(registry.getCounter(intMethod).isPresent(), equalTo(false));
        assertThat(registry.getTimer(voidMethod).isPresent(), equalTo(false));

        example.intMethod();
        example.voidMethod();
        assertThat(registry.getCounter(intMethod).get().getValue(), equalTo(1));
        assertThat(registry.getCounter(voidMethod).get().getValue(), equalTo(1));

        example.voidMethod();
        assertThat(registry.getCounter(intMethod).get().getValue(), equalTo(1));
        assertThat(registry.getCounter(voidMethod).get().getValue(), equalTo(2));

        // randomSleep() guarantees each timed call takes at least 1 ms.
        assertThat(registry.getTimer(intMethod).get().getValue().getAsDouble(), greaterThan(0.0d));
        assertThat(registry.getTimer(voidMethod).get().getValue().getAsDouble(), greaterThan(0.0d));
    }

    /**
     * Several interceptors passed to one bindInterceptor() call with an
     * any-method matcher apply to every method, annotated or not.
     */
    @Test
    public void addingAnnotations() throws NoSuchMethodException, SecurityException {
        Registry registry = new Registry();
        Injector injector = Guice.createInjector(new AbstractModule() {
            @Override
            protected void configure() {
                bindInterceptor(Matchers.annotatedWith(Tracked.class), Matchers.any(),
                    invocation -> registry.counter(invocation.getMethod()).count(invocation::proceed),
                    invocation -> registry.timer(invocation.getMethod()).time(invocation::proceed));
            }
        });

        Method intMethod = ExampleMethods.class.getMethod("intMethod");
        Method voidMethod = ExampleMethods.class.getMethod("voidMethod");
        Method unannotatedMethod = ExampleMethods.class.getMethod("unannotatedMethod");
        ExampleMethods example = injector.getInstance(ExampleMethods.class);

        assertThat(registry.getCounter(intMethod).isPresent(), equalTo(false));
        assertThat(registry.getTimer(voidMethod).isPresent(), equalTo(false));

        example.intMethod();
        example.voidMethod();
        example.unannotatedMethod();
        assertThat(registry.getCounter(intMethod).get().getValue(), equalTo(1));
        assertThat(registry.getCounter(voidMethod).get().getValue(), equalTo(1));
        assertThat(registry.getCounter(unannotatedMethod).get().getValue(), equalTo(1));

        example.voidMethod();
        assertThat(registry.getCounter(intMethod).get().getValue(), equalTo(1));
        assertThat(registry.getCounter(voidMethod).get().getValue(), equalTo(2));
        assertThat(registry.getTimer(intMethod).get().getValue().getAsDouble(), greaterThan(0.0d));
        assertThat(registry.getTimer(voidMethod).get().getValue().getAsDouble(), greaterThan(0.0d));
    }

    /** A Callable variant whose call() may throw any Throwable. */
    public static interface ThrowingCallable<T> {
        public T call() throws Throwable;
    }

    /** Counts invocations, then delegates to the wrapped callable. */
    public static class Counter {
        AtomicInteger count = new AtomicInteger();

        public <T> T count(ThrowingCallable<T> callable) throws Throwable {
            count.incrementAndGet();
            return callable.call();
        }

        public int getValue() {
            return count.get();
        }
    }

    /** Records the elapsed millis of the last 10 calls, even on failure. */
    public static class Timer {
        public EvictingQueue<Long> times = EvictingQueue.create(10);

        public <T> T time(ThrowingCallable<T> callable) throws Throwable {
            long started = System.currentTimeMillis();
            try {
                return callable.call();
            } finally {
                times.add(System.currentTimeMillis() - started);
            }
        }

        public OptionalDouble getValue() {
            return times.stream().mapToLong(Long::longValue).average();
        }
    }

    /** Lazily creates and looks up per-method counters and timers. */
    public static class Registry {
        private Map<Method, Counter> counters = Maps.newConcurrentMap();
        private Map<Method, Timer> timers = Maps.newConcurrentMap();

        public Counter counter(Method method) {
            return counters.computeIfAbsent(method, unused -> new Counter());
        }

        public Timer timer(Method method) {
            return timers.computeIfAbsent(method, unused -> new Timer());
        }

        public Optional<Counter> getCounter(Method method) {
            return Optional.ofNullable(counters.get(method));
        }

        public Optional<Timer> getTimer(Method method) {
            return Optional.ofNullable(timers.get(method));
        }
    }

    public static Random random = new Random();

    /** Sleeps 1-9 ms so every timed call records a nonzero duration. */
    public static void randomSleep() {
        Throwing.runnable(() -> MILLISECONDS.sleep(1 + random.nextInt(9))).run();
    }
}
| |
/*******************************************************************************
* Copyright (c) 2013, 2016 EclipseSource.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
******************************************************************************/
package com.eclipsesource.json;
import java.io.IOException;
import java.io.Reader;
import java.io.Serializable;
import java.io.StringWriter;
import java.io.Writer;
/**
* Represents a JSON value. This can be a JSON <strong>object</strong>, an <strong> array</strong>,
* a <strong>number</strong>, a <strong>string</strong>, or one of the literals
* <strong>true</strong>, <strong>false</strong>, and <strong>null</strong>.
* <p>
* The literals <strong>true</strong>, <strong>false</strong>, and <strong>null</strong> are
* represented by the constants {@link #TRUE}, {@link #FALSE}, and {@link #NULL}.
* </p>
* <p>
* JSON <strong>objects</strong> and <strong>arrays</strong> are represented by the subtypes
* {@link JsonObject} and {@link JsonArray}. Instances of these types can be created using the
* public constructors of these classes.
* </p>
* <p>
* Instances that represent JSON <strong>numbers</strong>, <strong>strings</strong> and
* <strong>boolean</strong> values can be created using the static factory methods
* {@link #valueOf(String)}, {@link #valueOf(long)}, {@link #valueOf(double)}, etc.
* </p>
* <p>
* In order to find out whether an instance of this class is of a certain type, the methods
* {@link #isObject()}, {@link #isArray()}, {@link #isString()}, {@link #isNumber()} etc. can be
* used.
* </p>
* <p>
* If the type of a JSON value is known, the methods {@link #asObject()}, {@link #asArray()},
* {@link #asString()}, {@link #asInt()}, etc. can be used to get this value directly in the
* appropriate target type.
* </p>
* <p>
* This class is <strong>not supposed to be extended</strong> by clients.
* </p>
*/
@SuppressWarnings("serial") // use default serial UID
public abstract class JsonValue implements Serializable {
/**
 * Represents the JSON literal <code>true</code>.
 * @deprecated Use <code>Json.TRUE</code> instead
 */
@Deprecated
public static final JsonValue TRUE = new JsonLiteral("true");

/**
 * Represents the JSON literal <code>false</code>.
 * @deprecated Use <code>Json.FALSE</code> instead
 */
@Deprecated
public static final JsonValue FALSE = new JsonLiteral("false");

/**
 * Represents the JSON literal <code>null</code>.
 * @deprecated Use <code>Json.NULL</code> instead
 */
@Deprecated
public static final JsonValue NULL = new JsonLiteral("null");

/**
 * Package-private constructor that keeps the hierarchy closed: only types
 * in this package can extend JsonValue, which is what allows the
 * <code>is*</code>/<code>as*</code> defaults below to be exhaustive.
 */
JsonValue() {
  // prevent subclasses outside of this package
}
/**
 * Reads a JSON value from the given reader.
 * <p>
 * Characters are read in chunks and buffered internally, therefore wrapping an existing reader in
 * an additional <code>BufferedReader</code> does <strong>not</strong> improve reading
 * performance.
 * </p>
 *
 * @param reader
 *          the reader to read the JSON value from
 * @return the JSON value that has been read
 * @throws IOException
 *           if an I/O error occurs in the reader
 * @throws ParseException
 *           if the input is not valid JSON
 * @deprecated Use {@link Json#parse(Reader)} instead
 */
@Deprecated
public static JsonValue readFrom(Reader reader) throws IOException {
  // Kept as a thin delegate for source compatibility with pre-Json callers.
  return Json.parse(reader);
}

/**
 * Reads a JSON value from the given string.
 *
 * @param text
 *          the string that contains the JSON value
 * @return the JSON value that has been read
 * @throws ParseException
 *           if the input is not valid JSON
 * @deprecated Use {@link Json#parse(String)} instead
 */
@Deprecated
public static JsonValue readFrom(String text) {
  // Kept as a thin delegate for source compatibility with pre-Json callers.
  return Json.parse(text);
}
/**
 * Returns a JsonValue instance that represents the given <code>int</code> value.
 *
 * @param value
 *          the value to get a JSON representation for
 * @return a JSON value that represents the given value
 * @deprecated Use {@link Json#value(int)} instead
 */
@Deprecated
public static JsonValue valueOf(int value) {
  return Json.value(value);
}

/**
 * Returns a JsonValue instance that represents the given <code>long</code> value.
 *
 * @param value
 *          the value to get a JSON representation for
 * @return a JSON value that represents the given value
 * @deprecated Use {@link Json#value(long)} instead
 */
@Deprecated
public static JsonValue valueOf(long value) {
  return Json.value(value);
}

/**
 * Returns a JsonValue instance that represents the given <code>float</code> value.
 *
 * @param value
 *          the value to get a JSON representation for
 * @return a JSON value that represents the given value
 * @deprecated Use {@link Json#value(float)} instead
 */
@Deprecated
public static JsonValue valueOf(float value) {
  return Json.value(value);
}

/**
 * Returns a JsonValue instance that represents the given <code>double</code> value.
 *
 * @param value
 *          the value to get a JSON representation for
 * @return a JSON value that represents the given value
 * @deprecated Use {@link Json#value(double)} instead
 */
@Deprecated
public static JsonValue valueOf(double value) {
  return Json.value(value);
}

/**
 * Returns a JsonValue instance that represents the given string.
 *
 * @param string
 *          the string to get a JSON representation for
 * @return a JSON value that represents the given string
 * @deprecated Use {@link Json#value(String)} instead
 */
@Deprecated
public static JsonValue valueOf(String string) {
  return Json.value(string);
}

/**
 * Returns a JsonValue instance that represents the given <code>boolean</code> value.
 *
 * @param value
 *          the value to get a JSON representation for
 * @return a JSON value that represents the given value
 * @deprecated Use {@link Json#value(boolean)} instead
 */
@Deprecated
public static JsonValue valueOf(boolean value) {
  return Json.value(value);
}
/**
 * Detects whether this value represents a JSON object. If this is the case, this value is an
 * instance of {@link JsonObject}.
 *
 * <p>The base class answers <code>false</code> for every type test; the one
 * matching subtype overrides its predicate to return <code>true</code>.
 *
 * @return <code>true</code> if this value is an instance of JsonObject
 */
public boolean isObject() {
  return false;
}

/**
 * Detects whether this value represents a JSON array. If this is the case, this value is an
 * instance of {@link JsonArray}.
 *
 * @return <code>true</code> if this value is an instance of JsonArray
 */
public boolean isArray() {
  return false;
}

/**
 * Detects whether this value represents a JSON number.
 *
 * @return <code>true</code> if this value represents a JSON number
 */
public boolean isNumber() {
  return false;
}

/**
 * Detects whether this value represents a JSON string.
 *
 * @return <code>true</code> if this value represents a JSON string
 */
public boolean isString() {
  return false;
}

/**
 * Detects whether this value represents a boolean value.
 *
 * @return <code>true</code> if this value represents either the JSON literal <code>true</code> or
 *         <code>false</code>
 */
public boolean isBoolean() {
  return false;
}

/**
 * Detects whether this value represents the JSON literal <code>true</code>.
 *
 * @return <code>true</code> if this value represents the JSON literal <code>true</code>
 */
public boolean isTrue() {
  return false;
}

/**
 * Detects whether this value represents the JSON literal <code>false</code>.
 *
 * @return <code>true</code> if this value represents the JSON literal <code>false</code>
 */
public boolean isFalse() {
  return false;
}

/**
 * Detects whether this value represents the JSON literal <code>null</code>.
 *
 * @return <code>true</code> if this value represents the JSON literal <code>null</code>
 */
public boolean isNull() {
  return false;
}
/**
* Returns this JSON value as {@link JsonObject}, assuming that this value represents a JSON
* object. If this is not the case, an exception is thrown.
*
* @return a JSONObject for this value
* @throws UnsupportedOperationException
* if this value is not a JSON object
*/
public JsonObject asObject() {
throw new UnsupportedOperationException("Not an object: " + toString());
}
/**
* Returns this JSON value as {@link JsonArray}, assuming that this value represents a JSON array.
* If this is not the case, an exception is thrown.
*
* @return a JSONArray for this value
* @throws UnsupportedOperationException
* if this value is not a JSON array
*/
public JsonArray asArray() {
throw new UnsupportedOperationException("Not an array: " + toString());
}
/**
* Returns this JSON value as an <code>int</code> value, assuming that this value represents a
* JSON number that can be interpreted as Java <code>int</code>. If this is not the case, an
* exception is thrown.
* <p>
* To be interpreted as Java <code>int</code>, the JSON number must neither contain an exponent
* nor a fraction part. Moreover, the number must be in the <code>Integer</code> range.
* </p>
*
* @return this value as <code>int</code>
* @throws UnsupportedOperationException
* if this value is not a JSON number
* @throws NumberFormatException
* if this JSON number can not be interpreted as <code>int</code> value
*/
public int asInt() {
throw new UnsupportedOperationException("Not a number: " + toString());
}
/**
* Returns this JSON value as a <code>long</code> value, assuming that this value represents a
* JSON number that can be interpreted as Java <code>long</code>. If this is not the case, an
* exception is thrown.
* <p>
* To be interpreted as Java <code>long</code>, the JSON number must neither contain an exponent
* nor a fraction part. Moreover, the number must be in the <code>Long</code> range.
* </p>
*
* @return this value as <code>long</code>
* @throws UnsupportedOperationException
* if this value is not a JSON number
* @throws NumberFormatException
* if this JSON number can not be interpreted as <code>long</code> value
*/
public long asLong() {
throw new UnsupportedOperationException("Not a number: " + toString());
}
/**
* Returns this JSON value as a <code>float</code> value, assuming that this value represents a
* JSON number. If this is not the case, an exception is thrown.
* <p>
* If the JSON number is out of the <code>Float</code> range, {@link Float#POSITIVE_INFINITY} or
* {@link Float#NEGATIVE_INFINITY} is returned.
* </p>
*
* @return this value as <code>float</code>
* @throws UnsupportedOperationException
* if this value is not a JSON number
*/
public float asFloat() {
    // Base implementation: rejected unless overridden by a number subtype.
    String message = "Not a number: " + toString();
    throw new UnsupportedOperationException(message);
}
/**
* Returns this JSON value as a <code>double</code> value, assuming that this value represents a
* JSON number. If this is not the case, an exception is thrown.
* <p>
* If the JSON number is out of the <code>Double</code> range, {@link Double#POSITIVE_INFINITY} or
* {@link Double#NEGATIVE_INFINITY} is returned.
* </p>
*
* @return this value as <code>double</code>
* @throws UnsupportedOperationException
* if this value is not a JSON number
*/
public double asDouble() {
    // Base implementation: rejected unless overridden by a number subtype.
    String message = "Not a number: " + toString();
    throw new UnsupportedOperationException(message);
}
/**
* Returns this JSON value as String, assuming that this value represents a JSON string. If this
* is not the case, an exception is thrown.
*
* @return the string represented by this value
* @throws UnsupportedOperationException
* if this value is not a JSON string
*/
public String asString() {
    // Base implementation: rejected unless overridden by the string subtype.
    String message = "Not a string: " + toString();
    throw new UnsupportedOperationException(message);
}
/**
* Returns this JSON value as a <code>boolean</code> value, assuming that this value is either
* <code>true</code> or <code>false</code>. If this is not the case, an exception is thrown.
*
* @return this value as <code>boolean</code>
* @throws UnsupportedOperationException
* if this value is neither <code>true</code> or <code>false</code>
*/
public boolean asBoolean() {
    // Base implementation: rejected unless overridden by the literal
    // true/false value types.
    String message = "Not a boolean: " + toString();
    throw new UnsupportedOperationException(message);
}
/**
* Writes the JSON representation of this value to the given writer in its minimal form, without
* any additional whitespace.
* <p>
* Writing performance can be improved by using a {@link java.io.BufferedWriter BufferedWriter}.
* </p>
*
* @param writer
* the writer to write this value to
* @throws IOException
* if an I/O error occurs in the writer
*/
public void writeTo(Writer writer) throws IOException {
    // Delegates to the two-argument overload with the compact (minimal) format.
    writeTo(writer, WriterConfig.MINIMAL);
}
/**
* Writes the JSON representation of this value to the given writer using the given formatting.
* <p>
* Writing performance can be improved by using a {@link java.io.BufferedWriter BufferedWriter}.
* </p>
*
* @param writer
* the writer to write this value to
* @param config
* a configuration that controls the formatting or <code>null</code> for the minimal form
* @throws IOException
* if an I/O error occurs in the writer
*/
public void writeTo(Writer writer, WriterConfig config) throws IOException {
    // Fail fast on null arguments before doing any work.
    if (writer == null) {
        throw new NullPointerException("writer is null");
    }
    if (config == null) {
        throw new NullPointerException("config is null");
    }
    // Small intermediate buffer (128 chars) between the configured JSON
    // writer and the caller-supplied writer; flushed once at the end.
    WritingBuffer buffered = new WritingBuffer(writer, 128);
    write(config.createWriter(buffered));
    buffered.flush();
}
/**
* Returns the JSON string for this value in its minimal form, without any additional whitespace.
* The result is guaranteed to be a valid input for the method {@link #readFrom(String)} and to
* create a value that is <em>equal</em> to this object.
*
* @return a JSON string that represents this value
*/
@Override
public String toString() {
    // Compact serialization; same output as writeTo(writer) would produce.
    return toString(WriterConfig.MINIMAL);
}
/**
* Returns the JSON string for this value using the given formatting.
*
* @param config
* a configuration that controls the formatting or <code>null</code> for the minimal form
* @return a JSON string that represents this value
*/
public String toString(WriterConfig config) {
    StringWriter out = new StringWriter();
    try {
        writeTo(out, config);
    } catch (IOException exception) {
        // StringWriter never raises IOException, so this path is effectively
        // unreachable; rethrow unchecked to keep the signature clean.
        throw new RuntimeException(exception);
    }
    return out.toString();
}
/**
* Indicates whether some other object is "equal to" this one according to the contract specified
* in {@link Object#equals(Object)}.
* <p>
* Two JsonValues are considered equal if and only if they represent the same JSON text. As a
* consequence, two given JsonObjects may be different even though they contain the same set of
* names with the same values, but in a different order.
* </p>
*
* @param object
* the reference object with which to compare
* @return true if this object is the same as the object argument; false otherwise
*/
@Override
public boolean equals(Object object) {
    // Base implementation falls back to Object identity; presumably the
    // concrete value types override this to compare JSON content — confirm
    // in subclasses.
    return super.equals(object);
}
@Override
public int hashCode() {
    // Kept consistent with equals(): the base class uses the identity hash
    // to match the identity-based equals() fallback.
    return super.hashCode();
}
// Serializes this value into the given low-level JSON writer; each concrete
// value type supplies its own representation.
abstract void write(JsonWriter writer) throws IOException;
}
| |
/*
* Copyright 2015-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package nebula.plugin.metrics;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;

import javax.annotation.Nullable;

import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;

import static com.google.common.base.Preconditions.checkNotNull;
/**
* Nebula build metrics plugin extension.
*
* @author Danny Thomas
*/
public class MetricsPluginExtension {
    /**
     * The name used when adding this extension to the extension container.
     */
    public static final String METRICS_EXTENSION_NAME = "metrics";

    // Every index name produced by this extension carries this prefix.
    private static final String INDEX_PREFIX = "build-metrics-";

    // Month-granularity suffix appended when rollingIndex is enabled.
    public static final DateTimeFormatter ROLLING_FORMATTER = DateTimeFormat.forPattern("yyyyMM");
    public static final String DEFAULT_INDEX_NAME = "default";

    // fullURI is intentionally nullable (no checkNotNull in its setter);
    // presumably hostname/ports are used when it is unset — confirm with the
    // dispatcher implementations.
    private String fullURI = null;
    private String hostname = "localhost";
    private int transportPort = 9300;
    private int httpPort = 9200;
    private String clusterName = "elasticsearch";
    private String indexName = DEFAULT_INDEX_NAME;
    private String esBasicAuthUsername;
    private String esBasicAuthPassword;
    private boolean rollingIndex = false;
    private String metricsIndexMappingFile; // location of mapping file used to create the rolling metrics index (optional)
    private String restUri = "http://localhost/metrics";
    private String restBuildEventName = "build_metrics";
    private String restLogEventName = "build_logs";
    private String splunkUri = "http://localhost/";
    private String splunkInputType = "HTTP_COLLECTOR";
    private HashMap<String,String> headers = new HashMap<String,String>();
    private DispatcherType dispatcherType = DispatcherType.ES_HTTP;
    private List<String> sanitizedProperties = new ArrayList<>();
    private String sanitizedPropertiesRegex = "(?i).*_(TOKEN|KEY|SECRET|PASSWORD)$";
    private boolean failOnError = true;
    private boolean verboseErrorOutput = false;

    public String getHostname() {
        return hostname;
    }

    public void setHostname(String hostname) {
        this.hostname = checkNotNull(hostname);
    }

    public String getFullURI() {
        return fullURI;
    }

    public void setFullURI(String fullURI) {
        this.fullURI = fullURI;
    }

    public String getEsBasicAuthUsername() {
        return esBasicAuthUsername;
    }

    public void setEsBasicAuthUsername(String esBasicAuthUsername) {
        this.esBasicAuthUsername = checkNotNull(esBasicAuthUsername);
    }

    public String getEsBasicAuthPassword() {
        return esBasicAuthPassword;
    }

    public void setEsBasicAuthPassword(String esBasicAuthPassword) {
        this.esBasicAuthPassword = checkNotNull(esBasicAuthPassword);
    }

    public int getTransportPort() {
        return transportPort;
    }

    public void setTransportPort(int transportPort) {
        this.transportPort = transportPort;
    }

    public int getHttpPort() {
        return httpPort;
    }

    public void setHttpPort(int httpPort) {
        this.httpPort = httpPort;
    }

    public String getClusterName() {
        return clusterName;
    }

    public void setClusterName(String clusterName) {
        this.clusterName = checkNotNull(clusterName);
    }

    /**
     * Returns the effective index name for the current moment.
     *
     * @return the prefixed (and, if rolling, date-suffixed) index name
     */
    public String getIndexName() {
        return getIndexName(DateTime.now());
    }

    /**
     * Returns the effective index name for the given instant.
     *
     * @param dt instant used for the rolling suffix; Joda-Time formats the
     *           current time when {@code dt} is null
     * @return the prefixed (and, if rolling, yyyyMM-suffixed) index name
     */
    public String getIndexName(@Nullable DateTime dt) {
        String name = INDEX_PREFIX + indexName;
        return rollingIndex ? name + "-" + ROLLING_FORMATTER.print(dt) : name;
    }

    public void setIndexName(String indexName) {
        this.indexName = checkNotNull(indexName);
    }

    public DispatcherType getDispatcherType() {
        return dispatcherType;
    }

    /**
     * Sets the dispatcher by (case-insensitive) enum name.
     *
     * @param dispatcherType one of the {@link DispatcherType} names
     * @throws IllegalArgumentException if the name matches no dispatcher type
     */
    public void setDispatcherType(String dispatcherType) {
        // Locale.ROOT keeps the enum lookup stable regardless of the JVM's
        // default locale (e.g. Turkish casing rules).
        this.dispatcherType = DispatcherType.valueOf(dispatcherType.toUpperCase(Locale.ROOT));
    }

    public List<String> getSanitizedProperties() {
        return sanitizedProperties;
    }

    public void setSanitizedProperties(List<String> sanitizedProperties) {
        this.sanitizedProperties = checkNotNull(sanitizedProperties);
    }

    public String getSanitizedPropertiesRegex() {
        return sanitizedPropertiesRegex;
    }

    public void setSanitizedPropertiesRegex(String sanitizedPropertiesRegex) {
        this.sanitizedPropertiesRegex = checkNotNull(sanitizedPropertiesRegex);
    }

    public String getRestLogEventName() {
        return restLogEventName;
    }

    public void setRestLogEventName(String restLogEventName) {
        this.restLogEventName = checkNotNull(restLogEventName);
    }

    public String getSplunkUri() {
        return splunkUri;
    }

    public void setSplunkUri(String splunkUri) {
        this.splunkUri = checkNotNull(splunkUri);
    }

    public String getSplunkInputType() {
        return splunkInputType;
    }

    public void setSplunkInputType(String splunkInputType) {
        this.splunkInputType = checkNotNull(splunkInputType);
    }

    public void setHeaders(HashMap<String,String> headers) {
        this.headers = checkNotNull(headers);
    }

    public HashMap<String,String> getHeaders() {
        return headers;
    }

    public String getRestBuildEventName() {
        return restBuildEventName;
    }

    public void setRestBuildEventName(String restBuildEventName) {
        this.restBuildEventName = checkNotNull(restBuildEventName);
    }

    public String getRestUri() {
        return restUri;
    }

    public void setRestUri(String restUri) {
        this.restUri = checkNotNull(restUri);
    }

    public boolean isFailOnError() {
        return failOnError;
    }

    public void setFailOnError(boolean failOnError) {
        this.failOnError = failOnError;
    }

    public boolean isVerboseErrorOutput() {
        return verboseErrorOutput;
    }

    public void setVerboseErrorOutput(boolean verboseErrorOutput) {
        this.verboseErrorOutput = verboseErrorOutput;
    }

    public boolean isRollingIndex() {
        return rollingIndex;
    }

    public void setRollingIndex(boolean rollingIndex) {
        this.rollingIndex = rollingIndex;
    }

    public String getMetricsIndexMappingFile() {
        return metricsIndexMappingFile;
    }

    public void setMetricsIndexMappingFile(String metricsIndexMappingFile) {
        this.metricsIndexMappingFile = checkNotNull(metricsIndexMappingFile);
    }

    /** Supported metric dispatch backends. */
    public enum DispatcherType {
        ES_HTTP,
        SPLUNK,
        REST,
        NOOP,
        CUSTOM
    }
}
| |
/*
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.dns;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.api.services.dns.model.Change;
import com.google.common.base.Function;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.util.List;
import java.util.Objects;
/**
* An immutable class representing an atomic update to a collection of {@link RecordSet}s within a
* {@code Zone}.
*
* @see <a href="https://cloud.google.com/dns/api/v1/changes">Google Cloud DNS documentation</a>
*/
public class ChangeRequest extends ChangeRequestInfo {
    private static final long serialVersionUID = 5335667200595081449L;
    // Service options; used to restore the transient dns field on deserialization.
    private final DnsOptions options;
    // Name of the zone this change request belongs to.
    private final String zone;
    // Not serialized; re-created in readObject() from options.
    private transient Dns dns;
    /**
     * A builder for {@code ChangeRequest}s.
     */
    public static class Builder extends ChangeRequestInfo.Builder {
        private final Dns dns;
        private final String zone;
        private final ChangeRequestInfo.BuilderImpl infoBuilder;
        // Only reachable via ChangeRequest.toBuilder(); copies the source
        // request's service handle, zone and info state.
        private Builder(ChangeRequest cr) {
            this.dns = cr.dns;
            this.zone = cr.zone;
            this.infoBuilder = new ChangeRequestInfo.BuilderImpl(cr);
        }
        // Each mutator below delegates to the wrapped info builder and
        // returns this builder for call chaining.
        @Override
        public Builder setAdditions(List<RecordSet> additions) {
            infoBuilder.setAdditions(additions);
            return this;
        }
        @Override
        public Builder setDeletions(List<RecordSet> deletions) {
            infoBuilder.setDeletions(deletions);
            return this;
        }
        @Override
        public Builder add(RecordSet recordSet) {
            infoBuilder.add(recordSet);
            return this;
        }
        @Override
        public Builder delete(RecordSet recordSet) {
            infoBuilder.delete(recordSet);
            return this;
        }
        @Override
        public Builder clearAdditions() {
            infoBuilder.clearAdditions();
            return this;
        }
        @Override
        public Builder clearDeletions() {
            infoBuilder.clearDeletions();
            return this;
        }
        @Override
        public Builder removeAddition(RecordSet recordSet) {
            infoBuilder.removeAddition(recordSet);
            return this;
        }
        @Override
        public Builder removeDeletion(RecordSet recordSet) {
            infoBuilder.removeDeletion(recordSet);
            return this;
        }
        // Package-private setters below are for server-assigned fields and
        // are not part of the public builder API.
        @Override
        Builder setGeneratedId(String generatedId) {
            infoBuilder.setGeneratedId(generatedId);
            return this;
        }
        @Override
        Builder setStartTime(long startTimeMillis) {
            infoBuilder.setStartTime(startTimeMillis);
            return this;
        }
        @Override
        Builder setStatus(Status status) {
            infoBuilder.setStatus(status);
            return this;
        }
        @Override
        public ChangeRequest build() {
            return new ChangeRequest(dns, zone, infoBuilder);
        }
    }
    // Package-private: instances are created via the Builder or fromPb().
    // Both the service handle and the zone name are required.
    ChangeRequest(Dns dns, String zone, ChangeRequest.BuilderImpl infoBuilder) {
        super(infoBuilder);
        this.zone = checkNotNull(zone);
        this.dns = checkNotNull(dns);
        this.options = dns.getOptions();
    }
    /**
     * Returns the name of the {@link Zone} associated with this change request.
     */
    public String getZone() {
        return this.zone;
    }
    /**
     * Returns the change request's {@code Dns} object used to issue requests.
     */
    public Dns getDns() {
        return dns;
    }
    /**
     * Applies this change request to the zone identified by {@code zoneName}.
     *
     * @throws DnsException upon failure or if zone is not found
     */
    public ChangeRequest applyTo(String zoneName, Dns.ChangeRequestOption... options) {
        return dns.applyChangeRequest(zoneName, this, options);
    }
    /**
     * Retrieves the up-to-date information about the change request from Google Cloud DNS. Parameter
     * {@code options} can be used to restrict the fields to be included in the updated object the
     * same way as in {@link Dns#getChangeRequest(String, String, Dns.ChangeRequestOption...)}. If
     * {@code options} are provided, any field other than generatedId which is not included in the
     * {@code options} will be {@code null} regardless of whether they are initialized or not in
     * {@code this} instance.
     *
     * @return an object with the updated information or {@code null} if it does not exist
     * @throws DnsException upon failure of the API call or if the associated zone was not found
     */
    public ChangeRequest reload(Dns.ChangeRequestOption... options) {
        return dns.getChangeRequest(zone, getGeneratedId(), options);
    }
    /**
     * Returns {@code true} if the change request has been completed. If the status is not {@link
     * ChangeRequestInfo.Status#DONE} already, the method makes an API call to Google Cloud DNS to
     * update the change request first.
     *
     * @throws DnsException upon failure of the API call or if the associated zone was not found
     */
    public boolean isDone() {
        if (status() == Status.DONE) {
            return true;
        }
        // Only fetch the STATUS field; a null reload means the request no
        // longer exists, which is treated as done.
        ChangeRequest updated = reload(Dns.ChangeRequestOption.fields(Dns.ChangeRequestField.STATUS));
        return updated == null || updated.status() == Status.DONE;
    }
    @Override
    public Builder toBuilder() {
        return new Builder(this);
    }
    // Equal only to other ChangeRequest instances (exact class match) whose
    // proto form, service options and zone name all match.
    @Override
    public final boolean equals(Object obj) {
        if (obj == this) {
            return true;
        }
        if (obj == null || !obj.getClass().equals(ChangeRequest.class)) {
            return false;
        }
        ChangeRequest other = (ChangeRequest) obj;
        return Objects.equals(toPb(), other.toPb())
            && Objects.equals(options, other.options)
            && Objects.equals(zone, other.zone);
    }
    // Combines the superclass hash with options and zone, mirroring equals().
    @Override
    public final int hashCode() {
        return Objects.hash(super.hashCode(), options, zone);
    }
    // Restores the transient dns handle after default deserialization.
    private void readObject(ObjectInputStream input) throws IOException, ClassNotFoundException {
        input.defaultReadObject();
        this.dns = options.getService();
    }
    // Converts the API protocol object into a ChangeRequest bound to the
    // given service handle and zone name.
    static ChangeRequest fromPb(Dns dns, String zoneName, Change pb) {
        ChangeRequestInfo info = ChangeRequestInfo.fromPb(pb);
        return new ChangeRequest(dns, zoneName, new ChangeRequestInfo.BuilderImpl(info));
    }
    // Adapter used when mapping API results (e.g. paged lists of Change) to
    // ChangeRequest instances.
    static Function<Change, ChangeRequest> fromPbFunction(final Dns dns, final String zoneName) {
        return new Function<Change, ChangeRequest>() {
            @Override
            public ChangeRequest apply(Change pb) {
                return ChangeRequest.fromPb(dns, zoneName, pb);
            }
        };
    }
}
| |
package org.marketcetera.saclient.rpc;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import org.marketcetera.module.ModuleInfo;
import org.marketcetera.module.ModuleURN;
import org.marketcetera.saclient.CreateStrategyParameters;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
/* $License$ */
/**
* Provides an implementation of <code>SAClientServiceAdapter</code> for testing.
*
* @author <a href="mailto:colin@marketcetera.com">Colin DuPlantis</a>
* @version $Id$
* @since $Release$
*/
public class MockSAClientServiceAdapter
        implements SAServiceAdapter
{
    // Test double: every adapter call records its argument (or bumps a
    // counter) and returns a canned value that the test configures via the
    // fields/setters below. reset() returns the mock to its initial state.
    /* (non-Javadoc)
     * @see org.marketcetera.saclient.rpc.SAClientServiceAdapter#getProviders()
     */
    @Override
    public List<ModuleURN> getProviders()
    {
        providersCount.incrementAndGet();
        return providersToReturn;
    }
    /* (non-Javadoc)
     * @see org.marketcetera.saclient.rpc.SAClientServiceAdapter#getInstances(org.marketcetera.module.ModuleURN)
     */
    @Override
    public List<ModuleURN> getInstances(ModuleURN inProvider)
    {
        instancesRequests.add(inProvider);
        return instancesToReturn;
    }
    /* (non-Javadoc)
     * @see org.marketcetera.saclient.rpc.SAClientServiceAdapter#getModuleInfo(org.marketcetera.module.ModuleURN)
     */
    @Override
    public ModuleInfo getModuleInfo(ModuleURN inInstance)
    {
        moduleInfoRequests.add(inInstance);
        return moduleInfoToReturn;
    }
    /* (non-Javadoc)
     * @see org.marketcetera.saclient.rpc.SAClientServiceAdapter#start(org.marketcetera.module.ModuleURN)
     */
    @Override
    public void start(ModuleURN inInstance)
    {
        startRequests.add(inInstance);
    }
    /* (non-Javadoc)
     * @see org.marketcetera.saclient.rpc.SAClientServiceAdapter#stop(org.marketcetera.module.ModuleURN)
     */
    @Override
    public void stop(ModuleURN inInstance)
    {
        stopRequests.add(inInstance);
    }
    /* (non-Javadoc)
     * @see org.marketcetera.saclient.rpc.SAClientServiceAdapter#delete(org.marketcetera.module.ModuleURN)
     */
    @Override
    public void delete(ModuleURN inInstance)
    {
        deleteRequests.add(inInstance);
    }
    /* (non-Javadoc)
     * @see org.marketcetera.saclient.rpc.SAClientServiceAdapter#getProperties(org.marketcetera.module.ModuleURN)
     */
    @Override
    public Map<String,Object> getProperties(ModuleURN inInstance)
    {
        propertiesRequests.add(inInstance);
        return propertiesToReturn;
    }
    /* (non-Javadoc)
     * @see org.marketcetera.saclient.rpc.SAClientServiceAdapter#setProperties(org.marketcetera.module.ModuleURN, java.util.Map)
     */
    @Override
    public Map<String,Object> setProperties(ModuleURN inInstance,
                                            Map<String,Object> inProperties)
    {
        // NOTE: shares propertiesRequests with getProperties(), so both
        // calls are recorded in the same list; echoes the input map back.
        propertiesRequests.add(inInstance);
        return inProperties;
    }
    /* (non-Javadoc)
     * @see org.marketcetera.saclient.rpc.SAClientServiceAdapter#createStrategy(org.marketcetera.saclient.CreateStrategyParameters)
     */
    @Override
    public ModuleURN createStrategy(CreateStrategyParameters inParameters)
    {
        createStrategyRequests.add(inParameters);
        return createModuleURNToReturn;
    }
    /* (non-Javadoc)
     * @see org.marketcetera.saclient.rpc.SAClientServiceAdapter#getStrategyCreateParms(org.marketcetera.module.ModuleURN)
     */
    @Override
    public CreateStrategyParameters getStrategyCreateParms(ModuleURN inInstance)
    {
        strategyCreateParmsRequests.add(inInstance);
        return parametersToReturn;
    }
    /* (non-Javadoc)
     * @see org.marketcetera.saclient.rpc.SAClientServiceAdapter#sendData(java.lang.Object)
     */
    @Override
    public void sendData(Object inData)
    {
        // Only the most recent payload is retained.
        sentData = inData;
    }
    /**
     * Get the providersToReturn value.
     *
     * @return a <code>List&lt;ModuleURN&gt;</code> value
     */
    public List<ModuleURN> getProvidersToReturn()
    {
        return providersToReturn;
    }
    /**
     * Get the providersCount value.
     *
     * @return an <code>AtomicInteger</code> value
     */
    public AtomicInteger getProvidersCount()
    {
        return providersCount;
    }
    /**
     * Get the instancesToReturn value.
     *
     * @return a <code>List&lt;ModuleURN&gt;</code> value
     */
    public List<ModuleURN> getInstancesToReturn()
    {
        return instancesToReturn;
    }
    /**
     * Get the instancesRequests value.
     *
     * @return a <code>List&lt;ModuleURN&gt;</code> value
     */
    public List<ModuleURN> getInstancesRequests()
    {
        return instancesRequests;
    }
    /**
     * Get the moduleInfoToReturn value.
     *
     * @return a <code>ModuleInfo</code> value
     */
    public ModuleInfo getModuleInfoToReturn()
    {
        return moduleInfoToReturn;
    }
    /**
     * Sets the moduleInfoToReturn value.
     *
     * @param inModuleInfoToReturn a <code>ModuleInfo</code> value
     */
    public void setModuleInfoToReturn(ModuleInfo inModuleInfoToReturn)
    {
        moduleInfoToReturn = inModuleInfoToReturn;
    }
    /**
     * Get the moduleInfoRequests value.
     *
     * @return a <code>List&lt;ModuleURN&gt;</code> value
     */
    public List<ModuleURN> getModuleInfoRequests()
    {
        return moduleInfoRequests;
    }
    /**
     * Get the startRequests value.
     *
     * @return a <code>List&lt;ModuleURN&gt;</code> value
     */
    public List<ModuleURN> getStartRequests()
    {
        return startRequests;
    }
    /**
     * Get the stopRequests value.
     *
     * @return a <code>List&lt;ModuleURN&gt;</code> value
     */
    public List<ModuleURN> getStopRequests()
    {
        return stopRequests;
    }
    /**
     * Get the deleteRequests value.
     *
     * @return a <code>List&lt;ModuleURN&gt;</code> value
     */
    public List<ModuleURN> getDeleteRequests()
    {
        return deleteRequests;
    }
    /**
     * Get the propertiesRequests value.
     *
     * @return a <code>List&lt;ModuleURN&gt;</code> value
     */
    public List<ModuleURN> getPropertiesRequests()
    {
        return propertiesRequests;
    }
    /**
     * Sets the propertiesRequests value.
     *
     * @param inPropertiesRequests a <code>List&lt;ModuleURN&gt;</code> value
     */
    public void setPropertiesRequests(List<ModuleURN> inPropertiesRequests)
    {
        propertiesRequests = inPropertiesRequests;
    }
    /**
     * Get the propertiesToReturn value.
     *
     * @return a <code>Map&lt;String,Object&gt;</code> value
     */
    public Map<String,Object> getPropertiesToReturn()
    {
        return propertiesToReturn;
    }
    /**
     * Get the createModuleURNToReturn value.
     *
     * @return a <code>ModuleURN</code> value
     */
    public ModuleURN getCreateModuleURNToReturn()
    {
        return createModuleURNToReturn;
    }
    /**
     * Sets the createModuleURNToReturn value.
     *
     * @param inCreateModuleURNToReturn a <code>ModuleURN</code> value
     */
    public void setCreateModuleURNToReturn(ModuleURN inCreateModuleURNToReturn)
    {
        createModuleURNToReturn = inCreateModuleURNToReturn;
    }
    /**
     * Get the createStrategyRequests value.
     *
     * @return a <code>List&lt;CreateStrategyParameters&gt;</code> value
     */
    public List<CreateStrategyParameters> getCreateStrategyRequests()
    {
        return createStrategyRequests;
    }
    /**
     * Get the parametersToReturn value.
     *
     * @return a <code>CreateStrategyParameters</code> value
     */
    public CreateStrategyParameters getParametersToReturn()
    {
        return parametersToReturn;
    }
    /**
     * Sets the parametersToReturn value.
     *
     * @param inParametersToReturn a <code>CreateStrategyParameters</code> value
     */
    public void setParametersToReturn(CreateStrategyParameters inParametersToReturn)
    {
        parametersToReturn = inParametersToReturn;
    }
    /**
     * Get the strategyCreateParmsRequests value.
     *
     * @return a <code>List&lt;ModuleURN&gt;</code> value
     */
    public List<ModuleURN> getStrategyCreateParmsRequests()
    {
        return strategyCreateParmsRequests;
    }
    /**
     * Get the sentData value.
     *
     * @return an <code>Object</code> value
     */
    public Object getSentData()
    {
        return sentData;
    }
    /**
     * Sets the sentData value.
     *
     * @param inSentData an <code>Object</code> value
     */
    public void setSentData(Object inSentData)
    {
        sentData = inSentData;
    }
    /**
     * Resets test values (clears all recorded requests and canned returns).
     */
    public void reset()
    {
        providersToReturn.clear();
        providersCount.set(0);
        instancesToReturn.clear();
        instancesRequests.clear();
        moduleInfoRequests.clear();
        moduleInfoToReturn = null;
        startRequests.clear();
        stopRequests.clear();
        deleteRequests.clear();
        propertiesRequests.clear();
        propertiesToReturn.clear();
        createStrategyRequests.clear();
        createModuleURNToReturn = null;
        parametersToReturn = null;
        strategyCreateParmsRequests.clear();
        sentData = null;
    }
    /**
     * providers to return value
     */
    private final List<ModuleURN> providersToReturn = Lists.newArrayList();
    /**
     * count of getProviders() invocations
     */
    private final AtomicInteger providersCount = new AtomicInteger(0);
    /**
     * instances to return value
     */
    private final List<ModuleURN> instancesToReturn = Lists.newArrayList();
    /**
     * instances requests
     */
    private final List<ModuleURN> instancesRequests = Lists.newArrayList();
    /**
     * module info requests
     */
    private final List<ModuleURN> moduleInfoRequests = Lists.newArrayList();
    /**
     * module info to return
     */
    private ModuleInfo moduleInfoToReturn;
    /**
     * start requests value
     */
    private final List<ModuleURN> startRequests = Lists.newArrayList();
    /**
     * stop requests value
     */
    private final List<ModuleURN> stopRequests = Lists.newArrayList();
    /**
     * delete requests value
     */
    private final List<ModuleURN> deleteRequests = Lists.newArrayList();
    /**
     * properties requests value (shared by getProperties and setProperties)
     */
    private List<ModuleURN> propertiesRequests = Lists.newArrayList();
    /**
     * properties to return value
     */
    private final Map<String,Object> propertiesToReturn = Maps.newHashMap();
    /**
     * create strategy requests value
     */
    private final List<CreateStrategyParameters> createStrategyRequests = Lists.newArrayList();
    /**
     * create module urn to return
     */
    private ModuleURN createModuleURNToReturn;
    /**
     * create strategy parameters to return
     */
    private CreateStrategyParameters parametersToReturn;
    /**
     * create strategy params requests
     */
    private final List<ModuleURN> strategyCreateParmsRequests = Lists.newArrayList();
    /**
     * object sent to {@link #sendData(Object)}
     */
    private Object sentData;
}
| |
package ser321.movieServer;
import org.json.*;
import java.util.Vector;
/**
* Copyright 2016 Kaelan Strones,
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Purpose: Practice C++ Programming
* Ser321 Foundations of Distributed Applications
* @author Kaelan Strones kstrones@asu.edu
* Software Engineering, ASU Poly
* @version August 2016
*/
public class movieDescription {
    // Movie metadata; "No Description" is the placeholder for unset string fields.
    private String Title, Rating, Release, Runtime, Plot, Filename;
    private Vector<String> Genre, Actors;

    /**
     * Constructs a description with placeholder text for every string field.
     * Genre and Actors are initialized to empty vectors so that toString()
     * and toJson() do not throw NullPointerException on a freshly
     * constructed object.
     */
    public movieDescription(){
        Title = "No Description";
        Rating = "No Description";
        Release = "No Description";
        Runtime = "No Description";
        Plot = "No Description";
        Filename = "No Description";
        Genre = new Vector<String>();
        Actors = new Vector<String>();
    }

    /**
     * Constructs a description from explicit field values.
     *
     * @param newTitle    the movie title
     * @param newRating   the content rating
     * @param newRelease  the release date
     * @param newRuntime  the runtime
     * @param newPlot     the plot summary
     * @param newFilename the name of the backing media file
     * @param newGenre    the list of genres
     * @param newActors   the list of actors
     */
    public movieDescription(String newTitle, String newRating,
                            String newRelease, String newRuntime,
                            String newPlot, String newFilename,
                            Vector<String> newGenre, Vector<String> newActors){
        Title = newTitle;
        Rating = newRating;
        Release = newRelease;
        Runtime = newRuntime;
        Plot = newPlot;
        Filename = newFilename;
        Genre = newGenre;
        Actors = newActors;
    }

    /**
     * Constructs a description from a JSON object. Expects the keys
     * "Title", "Rated", "Released", "Runtime", "Plot", "Filename", and the
     * arrays "Genre" and "Actors".
     *
     * @param jsonObj the JSON representation of the movie
     * @throws org.json.JSONException if a required key is missing
     */
    public movieDescription(JSONObject jsonObj){
        // Each scalar field is pulled by its respective keyword.
        Title = jsonObj.getString("Title");
        Rating = jsonObj.getString("Rated");
        Release = jsonObj.getString("Released");
        Runtime = jsonObj.getString("Runtime");
        Plot = jsonObj.getString("Plot");
        Filename = jsonObj.getString("Filename");
        // Genre and Actors are JSON arrays; copy each element into a vector.
        Genre = new Vector<String>();
        JSONArray jaGenre = jsonObj.getJSONArray("Genre");
        for (int i = 0; i < jaGenre.length(); i++){
            Genre.add(jaGenre.getString(i));
        }
        Actors = new Vector<String>();
        JSONArray jaActors = jsonObj.getJSONArray("Actors");
        for (int i = 0; i < jaActors.length(); i++){
            Actors.add(jaActors.getString(i));
        }
    }

    // Setters for all movie descriptors.
    public void setTitle(String newTitle){
        Title = newTitle;
    }
    public void setRating(String newRating){
        Rating = newRating;
    }
    public void setRelease(String newRelease){
        Release = newRelease;
    }
    public void setRuntime(String newRuntime){
        Runtime = newRuntime;
    }
    public void setPlot(String newPlot){
        Plot = newPlot;
    }
    public void setFilename(String newFilename){
        Filename = newFilename;
    }
    public void setGenre(Vector<String> newGenre){
        Genre = newGenre;
    }
    public void setActors(Vector<String> newActors){
        Actors = newActors;
    }

    /**
     * Gets the title of the movie.
     *
     * @return the movie title
     */
    public String getTitle(){
        return Title;
    }
    public String getRating(){
        return Rating;
    }
    public String getRelease(){
        return Release;
    }
    public String getRuntime(){
        return Runtime;
    }
    public String getPlot(){
        return Plot;
    }
    public String getFilename(){
        return Filename;
    }
    // NOTE: returns the internal vector, not a copy; callers can mutate it.
    public Vector<String> getGenre(){
        return Genre;
    }
    // NOTE: returns the internal vector, not a copy; callers can mutate it.
    public Vector<String> getActors(){
        return Actors;
    }

    /**
     * Converts all of the information of a movie into a printable
     * multi-line string.
     *
     * @return human-readable description of this movie
     */
    @Override
    public String toString(){
        // Comma-separated joins produce the same output as the previous
        // manual loops.
        String genre = String.join(", ", Genre);
        String actors = String.join(", ", Actors);
        String returnString =
                "\nTitle: " + Title +
                "\nRating: " + Rating +
                "\nRelease: " + Release +
                "\nRuntime: " + Runtime +
                "\nPlot: " + Plot +
                "\nFilename: " + Filename +
                "\nGenre: " + genre +
                "\nActors: " + actors + "\n";
        return returnString;
    }

    /**
     * Converts all of the movie information back into a JSON object so it
     * can be written to a file.
     *
     * @return JSON representation of this movie
     */
    public JSONObject toJson(){
        JSONObject jsonObj = new JSONObject();
        jsonObj.put("Title", Title);
        jsonObj.put("Rated", Rating);
        jsonObj.put("Released", Release);
        jsonObj.put("Runtime", Runtime);
        jsonObj.put("Plot", Plot);
        jsonObj.put("Filename", Filename);
        jsonObj.put("Genre", Genre);
        jsonObj.put("Actors", Actors);
        return jsonObj;
    }
}
| |
/*
* Copyright 2015 Suprema(biostar2@suprema.co.kr)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.supremainc.biostar2.widget.popup;
import android.app.Activity;
import android.content.Context;
import android.content.DialogInterface;
import android.content.DialogInterface.OnCancelListener;
import android.graphics.Bitmap;
import android.os.Handler;
import android.os.Looper;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewGroup.LayoutParams;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ScrollView;
import android.widget.Toast;
import com.supremainc.biostar2.R;
import com.supremainc.biostar2.impl.OnSingleClickListener;
import com.supremainc.biostar2.meta.Setting;
import com.supremainc.biostar2.view.StyledTextView;
import com.supremainc.biostar2.widget.CustomDialog;
/**
 * Builds and displays the application's modal dialogs on top of an
 * {@link Activity}: typed message popups with positive/negative buttons
 * (see {@link PopupType}) and an indeterminate "wait" popup. All dialogs
 * are backed by {@link CustomDialog}.
 * <p>
 * NOTE(review): this class manipulates views and dialogs directly, so it is
 * presumably meant to be used from the UI thread only — confirm with callers.
 */
public class Popup {
    private Activity mContext;              // hosting activity; used for inflation and isFinishing() guards
    private CustomDialog mWaitPopup = null; // lazily created indeterminate progress dialog
    private CustomDialog mDialog;           // currently shown message dialog; rebuilt on every show()
    private Handler mHandler;               // main-looper handler used to defer content-height measurement
    private int mValue;                     // user-choice flag (FACE_CONFIRM checkbox: 0 = unchecked, 1 = checked)

    // Default cancel listener handed to showWait(true); intentionally a no-op.
    private OnCancelListener cancelListener = new OnCancelListener() {
        @Override
        public void onCancel(DialogInterface mDialog) {
        }
    };

    /**
     * @param mContext the activity the dialogs are attached to
     */
    public Popup(Activity mContext) {
        this.mContext = mContext;
        mHandler = new Handler(Looper.getMainLooper());
    }

    /** Dismisses the message dialog (if showing) and then the wait dialog. */
    public void dismiss() {
        if (mDialog != null && mDialog.isShowing()) {
            mDialog.dismiss();
        }
        dismissWiat();
    }

    /**
     * Dismisses the wait popup if it is showing.
     * <p>
     * The name is a historical typo ("Wiat") kept for API compatibility.
     *
     * @return {@code true} only when no wait popup has ever been created
     *         (callers such as {@link #showWait(OnCancelListener)} use this to
     *         decide whether a new dialog must be built); {@code false}
     *         otherwise, even when an existing popup was dismissed here.
     */
    public boolean dismissWiat() {
        if (mWaitPopup == null) {
            return true;
        }
        if (mContext.isFinishing()) {
            return false;
        }
        if (mWaitPopup.isShowing()) {
            try {
                mWaitPopup.dismiss();
            } catch (IllegalArgumentException e) {
                // Ignored: the dialog's window may already be detached from a
                // finishing activity; there is nothing left to dismiss.
            }
        }
        return false;
    }

    /** Converts device-independent pixels to physical pixels, rounding to nearest. */
    public int dpToPx(double dp) {
        float scale = mContext.getResources().getDisplayMetrics().density;
        return (int) (dp * scale + 0.5f);
    }

    /** @return {@code true} if the message dialog exists and is currently showing. */
    public boolean isShownPopup() {
        if (mDialog == null) {
            return false;
        }
        if (mContext.isFinishing()) {
            return false;
        }
        if (mDialog.isShowing()) {
            return true;
        }
        return false;
    }

    /** @return {@code true} if the wait dialog exists and is currently showing. */
    public boolean isShownWait() {
        if (mWaitPopup == null) {
            return false;
        }
        if (mContext.isFinishing()) {
            return false;
        }
        if (mWaitPopup.isShowing()) {
            return true;
        }
        return false;
    }

    /** Converts physical pixels to device-independent pixels using the x-axis DPI. */
    public int pxToDp(int px) {
        DisplayMetrics displayMetrics = mContext.getResources().getDisplayMetrics();
        return Math.round(px / (displayMetrics.xdpi / DisplayMetrics.DENSITY_DEFAULT));
    }

    /** Shows a cancelable popup without a title. */
    public void show(PopupType type, String content, final OnPopupClickListener listener, String positive, String negative) {
        show(type, null, null, content, listener, positive, negative, true);
    }

    /** Shows a cancelable popup with a title. */
    public void show(PopupType type, String title, String content, final OnPopupClickListener listener, String positive, String negative) {
        show(type, null, title, content, listener, positive, negative, true);
    }

    /** Shows a popup with a title and explicit cancelability. */
    public void show(PopupType type, String title, String content, final OnPopupClickListener listener, String positive, String negative, boolean cancelable) {
        show(type, null, title, content, listener, positive, negative, cancelable);
    }

    /** @return the user-choice flag set by interactive popups (e.g. the FACE_CONFIRM checkbox). */
    public int getValue() {
        return mValue;
    }

    /**
     * Builds and shows a message popup.
     *
     * @param type       popup flavor; selects the layout and the icon
     * @param bmp        optional bitmap shown instead of the default face icon (FACE_CONFIRM only)
     * @param title      optional title text; ignored when {@code null}
     * @param content    message body; may carry an error code appended after
     *                   {@link Setting#ERROR_MESSAGE_SPLITE}, which is split off and
     *                   shown as a toast when the popup icon is tapped
     * @param listener   receives the positive/negative button callbacks; may be {@code null}
     * @param positive   positive button label; button hidden when {@code null}
     * @param negative   negative button label; button hidden when {@code null}
     * @param cancelable when both labels are {@code null} and this is {@code true},
     *                   a default OK button is added
     */
    public void show(PopupType type, Bitmap bmp, String title, String content, final OnPopupClickListener listener, String positive, String negative, boolean cancelable) {
        if (mContext.isFinishing()) {
            return;
        }
        dismiss();
        mValue = 0;
        mDialog = new CustomDialog(mContext);
        LayoutInflater inflater = (LayoutInflater) mContext.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
        ViewGroup layout = null;
        String errrorCode = null; // (sic) optional error code split out of the content
        if (content != null && !content.isEmpty()) {
            if (content.contains(Setting.ERROR_MESSAGE_SPLITE)) {
                String[] temp = content.split(Setting.ERROR_MESSAGE_SPLITE);
                content = temp[0];
                errrorCode = temp[1];
                // An empty or literal "null" message is replaced by a generic failure text.
                if (content.isEmpty() || content.equals("null")) {
                    content = mContext.getString(R.string.fail);
                }
            }
        }
        // Pick the layout matching the popup type.
        switch (type) {
            case CARD_CONFIRM:
                layout = (ViewGroup) inflater.inflate(R.layout.popup_card, null);
                break;
            case FACE_CONFIRM:
                layout = (ViewGroup) inflater.inflate(R.layout.popup_face, null);
                // Tapping the row toggles the checkbox image and mValue between 0 and 1.
                View container = layout.findViewById(R.id.container_select);
                container.setOnClickListener(new View.OnClickListener() {
                    @Override
                    public void onClick(View v) {
                        if (mValue == 0) {
                            ImageView iv = (ImageView) v.findViewById(R.id.set_select);
                            iv.setImageResource(R.drawable.check_box);
                            mValue = 1;
                        } else {
                            ImageView iv = (ImageView) v.findViewById(R.id.set_select);
                            iv.setImageResource(R.drawable.check_box_blank);
                            mValue = 0;
                        }
                    }
                });
                break;
            default:
                layout = (ViewGroup) inflater.inflate(R.layout.popup_common, null);
                break;
        }
        final StyledTextView contentView = (StyledTextView) layout.findViewById(R.id.content);
        // Shared click handler: the icon shows the error code as a toast; the
        // positive/negative buttons dismiss the dialog and invoke the callback.
        final OnSingleClickListener onClickListener = new OnSingleClickListener() {
            @Override
            public void onSingleClick(View v) {
                if (v.getId() == R.id.type) {
                    String errorCode = (String) v.getTag();
                    ToastPopup.makeText(mContext, errorCode, Toast.LENGTH_LONG).show();
                    return;
                }
                if (mDialog != null) {
                    mDialog.dismiss();
                }
                switch (v.getId()) {
                    case R.id.positive:
                        if (listener != null) {
                            listener.OnPositive();
                        }
                        break;
                    case R.id.negative:
                        if (listener != null) {
                            listener.OnNegative();
                        }
                        break;
                }
            }
        };
        if (errrorCode != null) {
            layout.findViewById(R.id.type).setOnClickListener(onClickListener);
            layout.findViewById(R.id.type).setTag(errrorCode);
        }
        StyledTextView positiveView = (StyledTextView) layout.findViewById(R.id.positive);
        positiveView.setOnClickListener(onClickListener);
        StyledTextView negativeView = (StyledTextView) layout.findViewById(R.id.negative);
        negativeView.setOnClickListener(onClickListener);
        StyledTextView titleView = (StyledTextView) layout.findViewById(R.id.title_text);
        if (title != null) {
            titleView.setText(title);
        }
        final LinearLayout mainView = (LinearLayout) layout.findViewById(R.id.main_container);
        mainView.setTag(type);
        ImageView popupType = (ImageView) layout.findViewById(R.id.type);
        // FACE_CONFIRM and CARD_CONFIRM layouts manage their own height, so the
        // delayed height adjustment below is skipped for them.
        boolean isRunHeight = true;
        switch (type) {
            case FACE_CONFIRM:
                isRunHeight = false;
                if (bmp != null) {
                    popupType.setImageBitmap(bmp);
                } else {
                    popupType.setImageResource(R.drawable.user_face);
                }
                break;
            case CONFIRM:
                popupType.setImageResource(R.drawable.popup_check_ic);
                break;
            case ALARM:
                popupType.setImageResource(R.drawable.popup_sound_ic);
                break;
            case ALERT:
                popupType.setImageResource(R.drawable.popup_error_ic);
                break;
            case INFO:
                popupType.setImageResource(R.drawable.popup_info_ic);
                break;
            case DOOR:
                popupType.setImageResource(R.drawable.popup_door_ic);
                break;
            case FIRE:
                popupType.setImageResource(R.drawable.popup_fire_ic);
                break;
            case CARD:
                popupType.setImageResource(R.drawable.user_card_number_ic);
                break;
            case FINGERPRINT:
                popupType.setImageResource(R.drawable.user_fp1);
                break;
            case FACE:
                popupType.setImageResource(R.drawable.user_face);
                break;
            case FINGERPRINT_AGAGIN:
                popupType.setImageResource(R.drawable.user_fp2);
                break;
            case FINGERPRINT_CONFIRM:
                popupType.setImageResource(R.drawable.user_fp3);
                break;
            case CARD_CONFIRM:
                isRunHeight = false;
                break;
            case NONE:
                popupType.setVisibility(View.GONE);
                break;
        }
        if (content == null) {
            content = "";
        }
        contentView.setText(content);
        // With no explicit buttons on a cancelable popup, fall back to a lone OK button.
        if ((positive == null && negative == null) && cancelable) {
            positive = mContext.getResources().getString(R.string.ok);
        }
        // The divider between the two buttons is only useful when both exist.
        LinearLayout devider = (LinearLayout) layout.findViewById(R.id.devider);
        if (positive == null || negative == null) {
            devider.setVisibility(View.GONE);
        } else {
            devider.setVisibility(View.VISIBLE);
        }
        positiveView.setText(positive);
        negativeView.setText(negative);
        if (positive == null) {
            positiveView.setVisibility(View.GONE);
        }
        if (negative == null) {
            negativeView.setVisibility(View.GONE);
        }
        final ScrollView contentContainer = (ScrollView) layout.findViewById(R.id.scroll);
        mDialog.setLayout(layout);
        if (isRunHeight) {
            // Hide the popup, give the text 500 ms to be laid out, then size the
            // scroll container from the measured line count and reveal the popup.
            mainView.setVisibility(View.INVISIBLE);
            mHandler.postDelayed(new Runnable() {
                @Override
                public void run() {
                    Log.e("popup", "line2:" + contentView.getLineCount());
                    int count = contentView.getLineCount();
                    if (count < 1) {
                        count = 4; // not laid out yet — assume a mid-size body
                    }
                    ImageView popupType = (ImageView) mainView.findViewById(R.id.type);
                    if (popupType.getVisibility() == View.GONE) {
                        // No icon: height tracks the text, capped at 229 dp.
                        int dp = count * 25;
                        if (dp > 229) {
                            dp = 229;
                        }
                        Log.e("popup", "dp:" + dp);
                        contentContainer.setLayoutParams(new LinearLayout.LayoutParams(LayoutParams.MATCH_PARENT, dpToPx(dp)));
                    } else {
                        // With an icon: base height plus up to 4 lines of text.
                        if (count > 4) {
                            count = 4;
                        }
                        contentContainer.setLayoutParams(new LinearLayout.LayoutParams(LayoutParams.MATCH_PARENT, dpToPx(157 + count * 18)));
                    }
                    mainView.setVisibility(View.VISIBLE);
                }
            }, 500);
        }
        if (mContext.isFinishing()) {
            return;
        }
        mDialog.show();
    }

    /**
     * Shows the indeterminate wait popup, creating it only when none has been
     * created before (see the {@link #dismissWiat()} return contract — an
     * existing popup instance is reused).
     *
     * @param cancelListener when non-null the popup is cancelable and the
     *                       listener receives the cancel event; when null the
     *                       popup cannot be canceled
     */
    public void showWait(OnCancelListener cancelListener) {
        if (mContext.isFinishing()) {
            return;
        }
        if (dismissWiat()) {
            mWaitPopup = new CustomDialog(mContext);
            LayoutInflater inflater = (LayoutInflater) mContext.getApplicationContext().getSystemService(Context.LAYOUT_INFLATER_SERVICE);
            ViewGroup layout = (ViewGroup) inflater.inflate(R.layout.popup_wait, null);
            mWaitPopup.setLayout(layout);
        }
        if (cancelListener != null) {
            mWaitPopup.setCancelable(true);
            mWaitPopup.setOnCancelListener(cancelListener);
        } else {
            mWaitPopup.setCancelable(false);
            mWaitPopup.setOnCancelListener(null);
        }
        mWaitPopup.findViewById(R.id.waitpopup_container).setVisibility(View.VISIBLE);
        mWaitPopup.show();
    }

    /** Convenience overload: {@code cancel} selects the no-op cancel listener or none. */
    public void showWait(boolean cancel) {
        if (cancel) {
            showWait(cancelListener);
        } else {
            showWait(null);
        }
    }

    /** Popup flavors; each selects a layout and/or icon in {@link #show}. */
    public enum PopupType {
        NONE, CONFIRM, ALARM, ALERT, INFO, DOOR, FIRE, CARD, CARD_CONFIRM, FINGERPRINT, FINGERPRINT_AGAGIN, FACE, FINGERPRINT_CONFIRM, FACE_CONFIRM
    }

    /** Callbacks for the popup's positive/negative buttons. */
    public interface OnPopupClickListener {
        public void OnNegative();
        public void OnPositive();
    }
}
| |
/*
* Copyright 2014 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.handler.ssl;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufAllocator;
import io.netty.buffer.Unpooled;
import io.netty.handler.codec.base64.Base64;
import io.netty.util.CharsetUtil;
import io.netty.util.internal.PlatformDependent;
import io.netty.util.internal.SystemPropertyUtil;
import io.netty.util.internal.logging.InternalLogger;
import io.netty.util.internal.logging.InternalLoggerFactory;
import org.apache.tomcat.jni.CertificateVerifier;
import org.apache.tomcat.jni.Pool;
import org.apache.tomcat.jni.SSL;
import org.apache.tomcat.jni.SSLContext;
import javax.net.ssl.SSLEngine;
import javax.net.ssl.SSLException;
import javax.net.ssl.SSLHandshakeException;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509ExtendedTrustManager;
import javax.net.ssl.X509TrustManager;
import java.security.PrivateKey;
import java.security.cert.Certificate;
import java.security.cert.X509Certificate;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import static io.netty.util.internal.ObjectUtil.checkNotNull;
import static io.netty.handler.ssl.ApplicationProtocolConfig.SelectorFailureBehavior;
import static io.netty.handler.ssl.ApplicationProtocolConfig.SelectedListenerFailureBehavior;
/**
 * An {@link SslContext} implementation backed by OpenSSL through the
 * netty-tcnative ({@code org.apache.tomcat.jni}) JNI bindings. Each instance
 * owns a native {@code SSL_CTX} pointer plus the APR memory pool it was
 * allocated from; both are released in {@link #destroy()} (also invoked from
 * {@link #finalize()}).
 */
public abstract class OpenSslContext extends SslContext {
    // PEM framing bytes used when converting keys/certificates into in-memory BIOs.
    private static final byte[] BEGIN_CERT = "-----BEGIN CERTIFICATE-----\n".getBytes(CharsetUtil.US_ASCII);
    private static final byte[] END_CERT = "\n-----END CERTIFICATE-----\n".getBytes(CharsetUtil.US_ASCII);
    private static final byte[] BEGIN_PRIVATE_KEY = "-----BEGIN PRIVATE KEY-----\n".getBytes(CharsetUtil.US_ASCII);
    private static final byte[] END_PRIVATE_KEY = "\n-----END PRIVATE KEY-----\n".getBytes(CharsetUtil.US_ASCII);
    private static final InternalLogger logger = InternalLoggerFactory.getInstance(OpenSslContext.class);
    /**
     * To make it easier for users to replace the JDK implementation with the OpenSsl version we also use
     * {@code jdk.tls.rejectClientInitiatedRenegotiation} to allow disabling client initiated renegotiation.
     * Java8+ uses this system property as well.
     *
     * See also <a href="http://blog.ivanristic.com/2014/03/ssl-tls-improvements-in-java-8.html">
     * Significant SSL/TLS improvements in Java 8</a>
     */
    private static final boolean JDK_REJECT_CLIENT_INITIATED_RENEGOTIATION =
            SystemPropertyUtil.getBoolean("jdk.tls.rejectClientInitiatedRenegotiation", false);
    private static final List<String> DEFAULT_CIPHERS;
    // TODO: Maybe make configurable ?
    protected static final int VERIFY_DEPTH = 10;
    /** The OpenSSL SSL_CTX object */
    protected volatile long ctx;
    long aprPool; // APR pool the SSL_CTX was allocated from; freed in destroy()
    @SuppressWarnings({ "unused", "FieldMayBeFinal" })
    private volatile int aprPoolDestroyed;
    private volatile boolean rejectRemoteInitiatedRenegotiation;
    private final List<String> unmodifiableCiphers;   // filtered cipher list (Java names)
    private final long sessionCacheSize;
    private final long sessionTimeout;
    private final OpenSslEngineMap engineMap = new DefaultOpenSslEngineMap();
    private final OpenSslApplicationProtocolNegotiator apn;
    private final int mode;                           // SSL.SSL_MODE_SERVER or SSL.SSL_MODE_CLIENT
    private final Certificate[] keyCertChain;         // defensive clone of the configured chain, or null
    private final ClientAuth clientAuth;

    // Negotiator used when no ApplicationProtocolConfig is supplied: advertises
    // nothing and accepts whatever the peer selects.
    static final OpenSslApplicationProtocolNegotiator NONE_PROTOCOL_NEGOTIATOR =
            new OpenSslApplicationProtocolNegotiator() {
                @Override
                public ApplicationProtocolConfig.Protocol protocol() {
                    return ApplicationProtocolConfig.Protocol.NONE;
                }
                @Override
                public List<String> protocols() {
                    return Collections.emptyList();
                }
                @Override
                public SelectorFailureBehavior selectorFailureBehavior() {
                    return SelectorFailureBehavior.CHOOSE_MY_LAST_PROTOCOL;
                }
                @Override
                public SelectedListenerFailureBehavior selectedListenerFailureBehavior() {
                    return SelectedListenerFailureBehavior.ACCEPT;
                }
            };

    static {
        List<String> ciphers = new ArrayList<String>();
        // XXX: Make sure to sync this list with JdkSslEngineFactory.
        Collections.addAll(
                ciphers,
                "ECDHE-RSA-AES128-GCM-SHA256",
                "ECDHE-RSA-AES128-SHA",
                "ECDHE-RSA-AES256-SHA",
                "AES128-GCM-SHA256",
                "AES128-SHA",
                "AES256-SHA",
                "DES-CBC3-SHA");
        DEFAULT_CIPHERS = Collections.unmodifiableList(ciphers);
        if (logger.isDebugEnabled()) {
            logger.debug("Default cipher suite (OpenSSL): " + ciphers);
        }
    }

    /**
     * Convenience constructor that first translates the {@link ApplicationProtocolConfig}
     * into an {@link OpenSslApplicationProtocolNegotiator} via {@link #toNegotiator}.
     */
    OpenSslContext(Iterable<String> ciphers, CipherSuiteFilter cipherFilter, ApplicationProtocolConfig apnCfg,
                   long sessionCacheSize, long sessionTimeout, int mode, Certificate[] keyCertChain,
                   ClientAuth clientAuth)
            throws SSLException {
        this(ciphers, cipherFilter, toNegotiator(apnCfg), sessionCacheSize, sessionTimeout, mode, keyCertChain,
                clientAuth);
    }

    /**
     * Creates and configures the native {@code SSL_CTX}.
     *
     * @param ciphers          requested cipher suites (Java or OpenSSL names), or null for defaults
     * @param cipherFilter     filters the requested ciphers against what is supported
     * @param apn              application-protocol (NPN/ALPN) negotiator; must not be null
     * @param sessionCacheSize session cache size, or {@code <= 0} to keep OpenSSL's default
     * @param sessionTimeout   session timeout in seconds, or {@code <= 0} to keep the default
     * @param mode             {@code SSL.SSL_MODE_SERVER} or {@code SSL.SSL_MODE_CLIENT}
     * @param keyCertChain     certificate chain to present, or null
     * @param clientAuth       client-auth requirement (servers only; clients forced to NONE)
     * @throws SSLException if the native context cannot be created or configured
     */
    OpenSslContext(Iterable<String> ciphers, CipherSuiteFilter cipherFilter,
                   OpenSslApplicationProtocolNegotiator apn, long sessionCacheSize,
                   long sessionTimeout, int mode, Certificate[] keyCertChain,
                   ClientAuth clientAuth) throws SSLException {
        OpenSsl.ensureAvailability();
        if (mode != SSL.SSL_MODE_SERVER && mode != SSL.SSL_MODE_CLIENT) {
            throw new IllegalArgumentException("mode most be either SSL.SSL_MODE_SERVER or SSL.SSL_MODE_CLIENT");
        }
        this.mode = mode;
        this.clientAuth = isServer() ? checkNotNull(clientAuth, "clientAuth") : ClientAuth.NONE;
        // Servers honor the JDK system property for rejecting client-initiated renegotiation.
        if (mode == SSL.SSL_MODE_SERVER) {
            rejectRemoteInitiatedRenegotiation =
                    JDK_REJECT_CLIENT_INITIATED_RENEGOTIATION;
        }
        this.keyCertChain = keyCertChain == null ? null : keyCertChain.clone();
        // Translate Java cipher names to OpenSSL names where possible; unknown
        // names are passed through unchanged. A null entry terminates the list.
        final List<String> convertedCiphers;
        if (ciphers == null) {
            convertedCiphers = null;
        } else {
            convertedCiphers = new ArrayList<String>();
            for (String c: ciphers) {
                if (c == null) {
                    break;
                }
                String converted = CipherSuiteConverter.toOpenSsl(c);
                if (converted != null) {
                    c = converted;
                }
                convertedCiphers.add(c);
            }
        }
        unmodifiableCiphers = Arrays.asList(checkNotNull(cipherFilter, "cipherFilter").filterCipherSuites(
                convertedCiphers, DEFAULT_CIPHERS, OpenSsl.availableCipherSuites()));
        this.apn = checkNotNull(apn, "apn");
        // Allocate a new APR pool.
        aprPool = Pool.create(0);
        // Create a new SSL_CTX and configure it.
        boolean success = false;
        try {
            // Class-level lock: native SSL_CTX creation/option calls are serialized
            // across all OpenSslContext instances (matches the lock in destroy()).
            synchronized (OpenSslContext.class) {
                try {
                    ctx = SSLContext.make(aprPool, SSL.SSL_PROTOCOL_ALL, mode);
                } catch (Exception e) {
                    throw new SSLException("failed to create an SSL_CTX", e);
                }
                // Harden the context: disable SSLv2/SSLv3, prefer server cipher
                // order and fresh (EC)DH keys, and drop sessions on renegotiation.
                SSLContext.setOptions(ctx, SSL.SSL_OP_ALL);
                SSLContext.setOptions(ctx, SSL.SSL_OP_NO_SSLv2);
                SSLContext.setOptions(ctx, SSL.SSL_OP_NO_SSLv3);
                SSLContext.setOptions(ctx, SSL.SSL_OP_CIPHER_SERVER_PREFERENCE);
                SSLContext.setOptions(ctx, SSL.SSL_OP_SINGLE_ECDH_USE);
                SSLContext.setOptions(ctx, SSL.SSL_OP_SINGLE_DH_USE);
                SSLContext.setOptions(ctx, SSL.SSL_OP_NO_SESSION_RESUMPTION_ON_RENEGOTIATION);
                // We need to enable SSL_MODE_ACCEPT_MOVING_WRITE_BUFFER as the memory address may change between
                // calling OpenSSLEngine.wrap(...).
                // See https://github.com/netty/netty-tcnative/issues/100
                SSLContext.setMode(ctx, SSLContext.getMode(ctx) | SSL.SSL_MODE_ACCEPT_MOVING_WRITE_BUFFER);
                /* List the ciphers that are permitted to negotiate. */
                try {
                    SSLContext.setCipherSuite(ctx, CipherSuiteConverter.toOpenSsl(unmodifiableCiphers));
                } catch (SSLException e) {
                    throw e;
                } catch (Exception e) {
                    throw new SSLException("failed to set cipher suite: " + unmodifiableCiphers, e);
                }
                List<String> nextProtoList = apn.protocols();
                /* Set next protocols for next protocol negotiation extension, if specified */
                if (!nextProtoList.isEmpty()) {
                    String[] protocols = nextProtoList.toArray(new String[nextProtoList.size()]);
                    int selectorBehavior = opensslSelectorFailureBehavior(apn.selectorFailureBehavior());
                    switch (apn.protocol()) {
                        case NPN:
                            SSLContext.setNpnProtos(ctx, protocols, selectorBehavior);
                            break;
                        case ALPN:
                            SSLContext.setAlpnProtos(ctx, protocols, selectorBehavior);
                            break;
                        case NPN_AND_ALPN:
                            SSLContext.setNpnProtos(ctx, protocols, selectorBehavior);
                            SSLContext.setAlpnProtos(ctx, protocols, selectorBehavior);
                            break;
                        default:
                            // Unreachable: NONE implies an empty protocol list.
                            throw new Error();
                    }
                }
                /* Set session cache size, if specified */
                if (sessionCacheSize > 0) {
                    this.sessionCacheSize = sessionCacheSize;
                    SSLContext.setSessionCacheSize(ctx, sessionCacheSize);
                } else {
                    // Get the default session cache size using SSLContext.setSessionCacheSize()
                    this.sessionCacheSize = sessionCacheSize = SSLContext.setSessionCacheSize(ctx, 20480);
                    // Revert the session cache size to the default value.
                    SSLContext.setSessionCacheSize(ctx, sessionCacheSize);
                }
                /* Set session timeout, if specified */
                if (sessionTimeout > 0) {
                    this.sessionTimeout = sessionTimeout;
                    SSLContext.setSessionCacheTimeout(ctx, sessionTimeout);
                } else {
                    // Get the default session timeout using SSLContext.setSessionCacheTimeout()
                    this.sessionTimeout = sessionTimeout = SSLContext.setSessionCacheTimeout(ctx, 300);
                    // Revert the session timeout to the default value.
                    SSLContext.setSessionCacheTimeout(ctx, sessionTimeout);
                }
            }
            success = true;
        } finally {
            // Release native resources if any configuration step above failed.
            if (!success) {
                destroy();
            }
        }
    }

    /** Maps the high-level selector-failure behavior to the tcnative constant. */
    private static int opensslSelectorFailureBehavior(SelectorFailureBehavior behavior) {
        switch (behavior) {
            case NO_ADVERTISE:
                return SSL.SSL_SELECTOR_FAILURE_NO_ADVERTISE;
            case CHOOSE_MY_LAST_PROTOCOL:
                return SSL.SSL_SELECTOR_FAILURE_CHOOSE_MY_LAST_PROTOCOL;
            default:
                throw new Error();
        }
    }

    @Override
    public final List<String> cipherSuites() {
        return unmodifiableCiphers;
    }

    @Override
    public final long sessionCacheSize() {
        return sessionCacheSize;
    }

    @Override
    public final long sessionTimeout() {
        return sessionTimeout;
    }

    @Override
    public ApplicationProtocolNegotiator applicationProtocolNegotiator() {
        return apn;
    }

    @Override
    public final boolean isClient() {
        return mode == SSL.SSL_MODE_CLIENT;
    }

    @Override
    public final SSLEngine newEngine(ByteBufAllocator alloc, String peerHost, int peerPort) {
        return new OpenSslEngine(ctx, alloc, isClient(), sessionContext(), apn, engineMap,
                rejectRemoteInitiatedRenegotiation, peerHost, peerPort, keyCertChain, clientAuth);
    }

    /**
     * Returns a new server-side {@link SSLEngine} with the current configuration.
     */
    @Override
    public final SSLEngine newEngine(ByteBufAllocator alloc) {
        return newEngine(alloc, null, -1);
    }

    /**
     * Returns the pointer to the {@code SSL_CTX} object for this {@link OpenSslContext}.
     * Be aware that it is freed as soon as the {@link #finalize()} method is called.
     * At this point {@code 0} will be returned.
     *
     * @deprecated use {@link #sslCtxPointer()}
     */
    @Deprecated
    public final long context() {
        return ctx;
    }

    /**
     * Returns the stats of this context.
     * @deprecated use {@link #sessionContext#stats()}
     */
    @Deprecated
    public final OpenSslSessionStats stats() {
        return sessionContext().stats();
    }

    /**
     * Specify if remote initiated renegotiation is supported or not. If not supported and the remote side tries
     * to initiate a renegotiation a {@link SSLHandshakeException} will be thrown during decoding.
     */
    public void setRejectRemoteInitiatedRenegotiation(boolean rejectRemoteInitiatedRenegotiation) {
        this.rejectRemoteInitiatedRenegotiation = rejectRemoteInitiatedRenegotiation;
    }

    @Override
    @SuppressWarnings("FinalizeDeclaration")
    protected final void finalize() throws Throwable {
        super.finalize();
        destroy();
    }

    /**
     * Sets the SSL session ticket keys of this context.
     * @deprecated use {@link OpenSslSessionContext#setTicketKeys(byte[])}
     */
    @Deprecated
    public final void setTicketKeys(byte[] keys) {
        sessionContext().setTicketKeys(keys);
    }

    @Override
    public abstract OpenSslSessionContext sessionContext();

    /**
     * Returns the pointer to the {@code SSL_CTX} object for this {@link OpenSslContext}.
     * Be aware that it is freed as soon as the {@link #finalize()} method is called.
     * At this point {@code 0} will be returned.
     */
    public final long sslCtxPointer() {
        return ctx;
    }

    /** Frees the native {@code SSL_CTX} and APR pool; safe to call more than once. */
    protected final void destroy() {
        synchronized (OpenSslContext.class) {
            if (ctx != 0) {
                SSLContext.free(ctx);
                ctx = 0;
            }
            // Guard against multiple destroyPools() calls triggered by construction exception and finalize() later
            if (aprPool != 0) {
                Pool.destroy(aprPool);
                aprPool = 0;
            }
        }
    }

    /** Wraps each DER-encoded certificate of {@code chain} in an {@link OpenSslX509Certificate}. */
    protected static X509Certificate[] certificates(byte[][] chain) {
        X509Certificate[] peerCerts = new X509Certificate[chain.length];
        for (int i = 0; i < peerCerts.length; i++) {
            peerCerts[i] = new OpenSslX509Certificate(chain[i]);
        }
        return peerCerts;
    }

    /**
     * Returns the first {@link X509TrustManager} from {@code managers}.
     *
     * @throws IllegalStateException if none of the managers is an {@link X509TrustManager}
     */
    protected static X509TrustManager chooseTrustManager(TrustManager[] managers) {
        for (TrustManager m : managers) {
            if (m instanceof X509TrustManager) {
                return (X509TrustManager) m;
            }
        }
        throw new IllegalStateException("no X509TrustManager found");
    }

    /**
     * Translate a {@link ApplicationProtocolConfig} object to a
     * {@link OpenSslApplicationProtocolNegotiator} object.
     * @param config The configuration which defines the translation
     * @return The results of the translation
     */
    static OpenSslApplicationProtocolNegotiator toNegotiator(ApplicationProtocolConfig config) {
        if (config == null) {
            return NONE_PROTOCOL_NEGOTIATOR;
        }
        switch (config.protocol()) {
        case NONE:
            return NONE_PROTOCOL_NEGOTIATOR;
        case ALPN:
        case NPN:
        case NPN_AND_ALPN:
            // Only the behavior combinations OpenSSL can express are accepted.
            switch (config.selectedListenerFailureBehavior()) {
            case CHOOSE_MY_LAST_PROTOCOL:
            case ACCEPT:
                switch (config.selectorFailureBehavior()) {
                case CHOOSE_MY_LAST_PROTOCOL:
                case NO_ADVERTISE:
                    return new OpenSslDefaultApplicationProtocolNegotiator(
                            config);
                default:
                    throw new UnsupportedOperationException(
                            new StringBuilder("OpenSSL provider does not support ")
                                    .append(config.selectorFailureBehavior())
                                    .append(" behavior").toString());
                }
            default:
                throw new UnsupportedOperationException(
                        new StringBuilder("OpenSSL provider does not support ")
                                .append(config.selectedListenerFailureBehavior())
                                .append(" behavior").toString());
            }
        default:
            throw new Error();
        }
    }

    /** @return true when Java 7+ extended trust-manager checks can be used. */
    static boolean useExtendedTrustManager(X509TrustManager trustManager) {
        return PlatformDependent.javaVersion() >= 7 && trustManager instanceof X509TrustManager;
    }

    /**
     * Base for native certificate-verification callbacks: adapts the tcnative
     * boolean-returning contract to Java's exception-based trust managers and
     * records any failure on the engine so the handshake can surface it.
     */
    abstract class AbstractCertificateVerifier implements CertificateVerifier {
        @Override
        public final boolean verify(long ssl, byte[][] chain, String auth) {
            X509Certificate[] peerCerts = certificates(chain);
            final OpenSslEngine engine = engineMap.remove(ssl);
            try {
                verify(engine, peerCerts, auth);
                return true;
            } catch (Throwable cause) {
                logger.debug("verification of certificate failed", cause);
                SSLHandshakeException e = new SSLHandshakeException("General OpenSslEngine problem");
                e.initCause(cause);
                engine.handshakeException = e;
            }
            return false;
        }
        // Implemented by subclasses; throws on verification failure.
        abstract void verify(OpenSslEngine engine, X509Certificate[] peerCerts, String auth) throws Exception;
    }

    /** Maps native SSL pointers to their owning engines (concurrent; shared per context). */
    private static final class DefaultOpenSslEngineMap implements OpenSslEngineMap {
        private final Map<Long, OpenSslEngine> engines = PlatformDependent.newConcurrentHashMap();
        @Override
        public OpenSslEngine remove(long ssl) {
            return engines.remove(ssl);
        }
        @Override
        public void add(OpenSslEngine engine) {
            engines.put(engine.sslPointer(), engine);
        }
    }

    /**
     * Return the pointer to a <a href="https://www.openssl.org/docs/crypto/BIO_get_mem_ptr.html">in-memory BIO</a>
     * or {@code 0} if the {@code key} is {@code null}. The BIO contains the content of the {@code key}.
     */
    static long toBIO(PrivateKey key) throws Exception {
        if (key == null) {
            return 0;
        }
        ByteBuf buffer = Unpooled.directBuffer();
        try {
            // PEM-encode the key: header, base64 body, footer.
            buffer.writeBytes(BEGIN_PRIVATE_KEY);
            ByteBuf wrappedBuf = Unpooled.wrappedBuffer(key.getEncoded());
            final ByteBuf encodedBuf;
            try {
                encodedBuf = Base64.encode(wrappedBuf, true);
                try {
                    buffer.writeBytes(encodedBuf);
                } finally {
                    encodedBuf.release();
                }
            } finally {
                wrappedBuf.release();
            }
            buffer.writeBytes(END_PRIVATE_KEY);
            return newBIO(buffer);
        } finally {
            buffer.release();
        }
    }

    /**
     * Return the pointer to a <a href="https://www.openssl.org/docs/crypto/BIO_get_mem_ptr.html">in-memory BIO</a>
     * or {@code 0} if the {@code certChain} is {@code null}. The BIO contains the content of the {@code certChain}.
     */
    static long toBIO(X509Certificate[] certChain) throws Exception {
        if (certChain == null) {
            return 0;
        }
        ByteBuf buffer = Unpooled.directBuffer();
        try {
            // PEM-encode each certificate of the chain in order.
            for (X509Certificate cert: certChain) {
                buffer.writeBytes(BEGIN_CERT);
                ByteBuf wrappedBuf = Unpooled.wrappedBuffer(cert.getEncoded());
                try {
                    ByteBuf encodedBuf = Base64.encode(wrappedBuf, true);
                    try {
                        buffer.writeBytes(encodedBuf);
                    } finally {
                        encodedBuf.release();
                    }
                } finally {
                    wrappedBuf.release();
                }
                buffer.writeBytes(END_CERT);
            }
            return newBIO(buffer);
        } finally {
            buffer.release();
        }
    }

    /** Copies {@code buffer} into a fresh native memory BIO; frees the BIO on failure. */
    private static long newBIO(ByteBuf buffer) throws Exception {
        long bio = SSL.newMemBIO();
        int readable = buffer.readableBytes();
        if (SSL.writeToBIO(bio, OpenSsl.memoryAddress(buffer), readable) != readable) {
            SSL.freeBIO(bio);
            throw new IllegalStateException("Could not write data to memory BIO");
        }
        return bio;
    }
}
| |
package macbury.forge.octree;
import com.badlogic.gdx.math.Frustum;
import com.badlogic.gdx.math.Vector3;
import com.badlogic.gdx.math.collision.BoundingBox;
import com.badlogic.gdx.utils.Array;
import com.badlogic.gdx.utils.Disposable;
import com.badlogic.gdx.utils.Pool;
import macbury.forge.octree.query.OctreeEach;
import macbury.forge.octree.query.OctreeQuery;
/**
* Created by macbury on 20.10.14.
*/
public class OctreeNode implements Pool.Poolable, Disposable {
public static int MAX_LEVELS = 6;
private int maxObjects = 20;
private int level;
private Array<OctreeObject> objects;
private Array<OctreeNode> nodes;
private BoundingBox bounds;
private final Vector3 tempA;
private final Vector3 tempB;
private final Vector3 tempC;
private OctreeNode parent;
private Vector3 center = new Vector3();
private Vector3 min = new Vector3();
private Vector3 max = new Vector3();
private BoundingBox tempBox = new BoundingBox();
private static final Pool<OctreeNode> octreeNodePool = new Pool<OctreeNode>() {
@Override
protected OctreeNode newObject() {
return new OctreeNode();
}
};
public OctreeNode() {
this.tempA = new Vector3();
this.tempB = new Vector3();
this.tempC = new Vector3();
this.level = 0;
this.objects = new Array<OctreeObject>();
this.nodes = new Array<OctreeNode>();
this.bounds = new BoundingBox();
this.parent = null;
this.maxObjects = 24;
clear();
}
public int getIndex(OctreeObject object) {
object.getBoundingBox(tempBox);
return getIndex(tempBox);
}
private int getIndex(Vector3 point) {
int index = -1;
if (haveNodes()) {
for (int i = 0; i < nodes.size; i++) {
OctreeNode node = nodes.get(i);
if (node.getBounds().contains(point)) {
index = i;
break;
}
}
}
return index;
}
public int getIndex(BoundingBox pRect) {
int index = -1;
if (haveNodes()) {
for (int i = 0; i < nodes.size; i++) {
OctreeNode node = nodes.get(i);
if (node.contains(pRect)) {
index = i;
break;
}
}
}
return index;
}
public boolean contains(BoundingBox pRect) {
return bounds.contains(pRect);
}
private void insertIntoProperNode(OctreeObject objectToInsert) {
int index = getIndex(objectToInsert);
if (index != -1) {
nodes.get(index).insert(objectToInsert);
return;
} else {
objects.add(objectToInsert);
}
}
public void insert(Array<OctreeObject> objectsToInsert) {
for (int i = 0; i < objectsToInsert.size; i++) {
insert(objectsToInsert.get(i));
}
}
public void insert(OctreeObject objectToInsert) {
if (haveNodes()) {
insertIntoProperNode(objectToInsert);
} else {
objects.add(objectToInsert);
objectToInsert.setOctreeParent(this);
if (objects.size > maxObjects && level < MAX_LEVELS) {
if (!haveNodes())
split();
int i = 0;
while (i < objects.size) {
OctreeObject currentObject = objects.get(i);
int index = getIndex(currentObject);
if (index != -1) {
objects.removeValue(currentObject, false);
nodes.get(index).insert(currentObject);
} else {
i++;
}
}
}
}
}
public boolean remove(OctreeObject object) {
int index = getIndex(object);
if (index == -1) {
return objects.removeValue(object, true);
} else {
return nodes.get(index).remove(object);
}
}
public OctreeNode getNode(OctreePart part) {
return nodes.get(part.getIndex());
}
private void split() {
center = bounds.getCenter(center);
bounds.getMin(min);
bounds.getMax(max);
buildNode(tempA.set(max).set(max.x, max.y, min.z), center, OctreePart.FrontTopLeft);
buildNode(tempA.set(min).set(min.x, max.y, min.z), center, OctreePart.FrontTopRight);
buildNode(min, center, OctreePart.FrontBottomRight);
buildNode(tempA.set(max).set(max.x, min.y, min.z), center, OctreePart.FrontTopLeft);
buildNode(tempA.set(min).set(min.x, max.y, max.z), center, OctreePart.BackTopRight);
buildNode(max, center, OctreePart.BackTopLeft);
buildNode(tempA.set(max).set(min.x, min.y, max.z), center, OctreePart.BackBottomRight);
buildNode(tempA.set(max).set(max.x, min.y, max.z), center, OctreePart.BackBottomLeft);
}
private void buildNode(Vector3 min, Vector3 max, OctreePart part) {
tempBox.set(min, max);
OctreeNode nodeQuadrant = OctreeNode.node(level, tempBox);
nodeQuadrant.setParent(this);
nodeQuadrant.setMaxObjects(maxObjects);
nodes.add(nodeQuadrant);
}
public void getDimension(Vector3 out) {
bounds.getDimensions(out);
}
public float getWidth() {
bounds.getDimensions(tempA);
return tempA.x;
}
public float getHeight() {
bounds.getDimensions(tempA);
return tempA.y;
}
public float getDepth() {
bounds.getDimensions(tempA);
return tempA.z;
}
public OctreeNode getParent() {
return parent;
}
public void setParent(OctreeNode parent) {
this.parent = parent;
}
public boolean haveNodes() {
return nodes.size != 0;
}
public int getLevel() {
return level;
}
public BoundingBox getBounds() {
return bounds;
}
public void setBounds(BoundingBox box) {
bounds.set(box);
clear();
}
public void clear() {
for (OctreeObject o : objects) {
o.setOctreeParent(null);
}
objects.clear();
for (OctreeNode node : nodes) {
octreeNodePool.free(node);
}
nodes.clear();
}
@Override
public void reset() {
this.level = 0;
this.bounds.set(Vector3.Zero, Vector3.Zero);
parent = null;
clear();
}
public static OctreeNode root() {
OctreeNode node = octreeNodePool.obtain();
return node;
}
public static OctreeNode node(int parentLevel, BoundingBox box) {
OctreeNode node = octreeNodePool.obtain();
node.setLevel(parentLevel + 1);
node.setBounds(box);
return node;
}
@Override
public void dispose() {
clear();
}
public void setLevel(int level) {
this.level = level;
}
public void bottomNodes(Array<OctreeNode> out) {
if (haveNodes()) {
for(OctreeNode node : nodes) {
node.bottomNodes(out);
}
} else {
out.add(this);
}
}
public void retriveNodes(Array<OctreeNode> outNodes, Frustum frustum) {
if (haveNodes()) {
for(OctreeNode node : nodes) {
node.retriveNodes(outNodes, frustum);
}
} else {
if (frustum.boundsInFrustum(this.getBounds())) {
outNodes.add(this);
}
}
}
/**
 * Collects objects from this subtree that may be visible in {@code frustum}.
 * Children are culled against the frustum before recursing. Objects stored at
 * this node are tested individually only when {@code checkObjectsToo} is true;
 * otherwise they are all appended without a per-object test.
 * Uses the {@code tempBox} scratch — presumably shared, so not safe for
 * concurrent queries (NOTE(review): confirm whether tempBox is static).
 */
public void retrieve(Array<OctreeObject> returnObjects, Frustum frustum, boolean checkObjectsToo) {
    if (haveNodes()) {
        for (OctreeNode node : nodes) {
            if (frustum.boundsInFrustum(node.getBounds())) {
                node.retrieve(returnObjects, frustum, checkObjectsToo);
            }
        }
    }
    if (checkObjectsToo) {
        for (OctreeObject object : objects) {
            object.getBoundingBox(tempBox);
            if (frustum.boundsInFrustum(tempBox)) {
                returnObjects.add(object);
            }
        }
    } else {
        returnObjects.addAll(objects);
    }
}
/**
 * Collects candidate objects for the given box: recurses into the single child
 * quadrant that fully contains it (when subdivided), then appends every object
 * stored at this node — no per-object overlap test is performed here.
 */
public void retrieve(Array<OctreeObject> returnObjects, BoundingBox object) {
    int index = getIndex(object);
    if (index != -1 && haveNodes()) {
        nodes.get(index).retrieve(returnObjects, object);
    }
    returnObjects.addAll(objects);
}
/**
 * Return objects whose bounding box contains the given point.
 * Recurses into the child node containing the point (if subdivided), then
 * tests every object stored at this node against the point.
 * Uses the shared {@code tempBox} scratch — not safe for concurrent queries.
 * @param returnObjects output array that matching objects are appended to
 * @param point the query point
 */
public void retrieve(Array<OctreeObject> returnObjects, Vector3 point) {
    int index = getIndex(point);
    if (index != -1 && haveNodes()) {
        nodes.get(index).retrieve(returnObjects, point);
    }
    for (int i = 0; i < objects.size; i++) {
        OctreeObject octreeObject = objects.get(i);
        octreeObject.getBoundingBox(tempBox);
        if (tempBox.contains(point)) {
            returnObjects.add(octreeObject);
        }
    }
}
/** Convenience overload: queries with {@code object}'s own bounding box. */
public void retrieve(Array<OctreeObject> returnObjects, OctreeObject object) {
    object.getBoundingBox(tempBox);
    retrieve(returnObjects, tempBox);
}
/** Sets the object-count threshold for this node (presumably triggers subdivision elsewhere — confirm). */
public void setMaxObjects(int maxObjects) {
    this.maxObjects = maxObjects;
}
/**
 * Collects objects accepted by the given query. Children are pruned with
 * {@code query.checkNode} before recursing; objects stored at this node are
 * filtered individually with {@code query.checkObject}.
 */
public void retrieve(Array<OctreeObject> returnObjects, OctreeQuery query) {
    // Descend only into children the query accepts.
    if (haveNodes()) {
        for (OctreeNode child : nodes) {
            if (query.checkNode(child)) {
                child.retrieve(returnObjects, query);
            }
        }
    }
    // Collect objects held at this level that pass the object filter.
    for (OctreeObject candidate : objects) {
        if (query.checkObject(candidate)) {
            returnObjects.add(candidate);
        }
    }
}
}
| |
package org.nem.core.serialization;
import net.minidev.json.*;
import org.hamcrest.core.*;
import org.junit.*;
import org.nem.core.test.*;
import java.util.*;
import java.util.function.Function;
/**
 * Unit tests for {@code SerializableList}: construction, null rejection,
 * add, first-difference comparison, JSON (de)serialization round-trips,
 * and the hashCode/equals contract.
 */
public class SerializableListTest {
    //region Constructors
    @Test
    public void ctorCapacityHasNoImpactOnSize() {
        // Act:
        final SerializableList<MockSerializableEntity> list = new SerializableList<>(100);
        // Assert: capacity is a hint only; a new list is empty with the default label.
        Assert.assertThat(list.size(), IsEqual.equalTo(0));
        Assert.assertThat(list.getLabel(), IsEqual.equalTo("data"));
    }
    @Test
    public void ctorCapacityCanSpecifyCustomLabel() {
        // Act:
        final SerializableList<MockSerializableEntity> list = new SerializableList<>(100, "items");
        // Assert:
        Assert.assertThat(list.size(), IsEqual.equalTo(0));
        Assert.assertThat(list.getLabel(), IsEqual.equalTo("items"));
    }
    @Test
    public void ctorListInitializesSerializableListWithItems() {
        // Arrange:
        final List<MockSerializableEntity> rawList = new ArrayList<>();
        rawList.add(new MockSerializableEntity());
        rawList.add(new MockSerializableEntity());
        // Act:
        final SerializableList<MockSerializableEntity> list = new SerializableList<>(rawList);
        // Assert: items are referenced, not copied (same instances).
        Assert.assertThat(list.size(), IsEqual.equalTo(2));
        Assert.assertThat(list.get(0), IsSame.sameInstance(rawList.get(0)));
        Assert.assertThat(list.get(1), IsSame.sameInstance(rawList.get(1)));
        Assert.assertThat(list.getLabel(), IsEqual.equalTo("data"));
    }
    @Test
    public void ctorListCanSpecifyCustomLabel() {
        // Arrange:
        final List<MockSerializableEntity> rawList = new ArrayList<>();
        rawList.add(new MockSerializableEntity());
        rawList.add(new MockSerializableEntity());
        // Act:
        final SerializableList<MockSerializableEntity> list = new SerializableList<>(rawList, "items");
        // Assert:
        Assert.assertThat(list.size(), IsEqual.equalTo(2));
        Assert.assertThat(list.get(0), IsSame.sameInstance(rawList.get(0)));
        Assert.assertThat(list.get(1), IsSame.sameInstance(rawList.get(1)));
        Assert.assertThat(list.getLabel(), IsEqual.equalTo("items"));
    }
    @Test
    public void ctorListIsDetachedFromSerializableList() {
        // Arrange:
        final List<MockSerializableEntity> rawList = new ArrayList<>();
        rawList.add(new MockSerializableEntity());
        rawList.add(new MockSerializableEntity());
        // Act: mutate the source list after construction.
        final SerializableList<MockSerializableEntity> list = new SerializableList<>(rawList);
        rawList.add(new MockSerializableEntity());
        // Assert: the serializable list took a snapshot, so it is unaffected.
        Assert.assertThat(rawList.size(), IsEqual.equalTo(3));
        Assert.assertThat(list.size(), IsEqual.equalTo(2));
    }
    @Test(expected = IllegalArgumentException.class)
    public void ctorListCannotContainNullItems() {
        // Arrange:
        final List<MockSerializableEntity> rawList = new ArrayList<>();
        rawList.add(new MockSerializableEntity());
        rawList.add(null);
        rawList.add(new MockSerializableEntity());
        // Act: (expected to throw)
        new SerializableList<>(rawList);
    }
    // endregion
    // region Add
    @Test
    public void addAddsItemsToList() {
        // Arrange:
        final MockSerializableEntity entity1 = new MockSerializableEntity();
        final MockSerializableEntity entity2 = new MockSerializableEntity();
        final MockSerializableEntity entity3 = new MockSerializableEntity();
        final SerializableList<MockSerializableEntity> list = new SerializableList<>(0);
        // Act:
        list.add(entity1);
        list.add(entity2);
        list.add(entity3);
        // Assert: insertion order is preserved.
        Assert.assertThat(list.size(), IsEqual.equalTo(3));
        Assert.assertThat(list.get(0), IsEqual.equalTo(entity1));
        Assert.assertThat(list.get(1), IsEqual.equalTo(entity2));
        Assert.assertThat(list.get(2), IsEqual.equalTo(entity3));
    }
    @Test(expected = IllegalArgumentException.class)
    public void cannotAddNullItemToList() {
        // Arrange:
        final SerializableList<MockSerializableEntity> list = new SerializableList<>(0);
        // Act: (expected to throw)
        list.add(null);
    }
    // endregion
    // region FindFirst simple
    @Test
    public void canCompareEmptyChains() {
        // Arrange:
        final SerializableList<MockSerializableEntity> list1 = new SerializableList<>(10);
        final SerializableList<MockSerializableEntity> list2 = new SerializableList<>(20);
        // Assert: empty lists are equal and first difference is at index 0.
        assertListComparison(list1, list2, 0, true);
    }
    @Test
    public void canCompareWithSelf() {
        // Arrange:
        final SerializableList<MockSerializableEntity> list = new SerializableList<>(10);
        list.add(new MockSerializableEntity());
        list.add(new MockSerializableEntity());
        // Assert:
        assertListComparison(list, list, 2, true);
    }
    @Test
    public void canCompareWithSelfUsingAdd() {
        // NOTE(review): this test is currently byte-identical to canCompareWithSelf
        // above; presumably one of the two was meant to build the list differently
        // (e.g. via the List constructor) — worth confirming the original intent.
        // Arrange:
        final SerializableList<MockSerializableEntity> list = new SerializableList<>(10);
        list.add(new MockSerializableEntity());
        list.add(new MockSerializableEntity());
        // Assert:
        assertListComparison(list, list, 2, true);
    }
    // endregion
    // region FindFirst two chains
    @Test
    public void canCompareTwoConsistentChains() {
        // Arrange: two lists with equal-valued (but distinct) entities.
        final SerializableList<MockSerializableEntity> list1 = new SerializableList<>(10);
        list1.add(new MockSerializableEntity(5, "foo", 6));
        list1.add(new MockSerializableEntity(8, "bar", 7));
        final SerializableList<MockSerializableEntity> list2 = new SerializableList<>(10);
        list2.add(new MockSerializableEntity(5, "foo", 6));
        list2.add(new MockSerializableEntity(8, "bar", 7));
        // Assert:
        assertListComparison(list1, list2, 2, true);
    }
    @Test
    public void canCompareConsistentChainsWithDifferentLengths() {
        // Arrange: list1 is a strict prefix of list2.
        final SerializableList<MockSerializableEntity> list1 = new SerializableList<>(10);
        list1.add(new MockSerializableEntity(5, "foo", 6));
        final SerializableList<MockSerializableEntity> list2 = new SerializableList<>(10);
        list2.add(new MockSerializableEntity(5, "foo", 6));
        list2.add(new MockSerializableEntity(8, "bar", 7));
        // Assert: first difference is at the end of the shorter list; lists are not equal.
        assertListComparison(list1, list2, 1, false);
    }
    @Test
    public void canCompareInconsistentChains() {
        // Arrange: the lists diverge at index 2 ("c" vs "d").
        final SerializableList<MockSerializableEntity> list1 = new SerializableList<>(10);
        list1.add(new MockSerializableEntity(5, "a", 6));
        list1.add(new MockSerializableEntity(5, "b", 6));
        list1.add(new MockSerializableEntity(5, "c", 6));
        list1.add(new MockSerializableEntity(5, "d", 6));
        list1.add(new MockSerializableEntity(5, "e", 6));
        final SerializableList<MockSerializableEntity> list2 = new SerializableList<>(10);
        list2.add(new MockSerializableEntity(5, "a", 6));
        list2.add(new MockSerializableEntity(5, "b", 6));
        list2.add(new MockSerializableEntity(5, "d", 6));
        list2.add(new MockSerializableEntity(5, "e", 6));
        // Assert:
        assertListComparison(list1, list2, 2, false);
    }
    // endregion
    //region asCollection
    @Test
    public void asCollectionReturnsRawCollection() {
        // Arrange:
        final List<MockSerializableEntity> rawList = Arrays.asList(
            new MockSerializableEntity(12, "a", 12),
            new MockSerializableEntity(4, "b", 4),
            new MockSerializableEntity(122, "c", 122));
        final SerializableList<MockSerializableEntity> list = new SerializableList<>(rawList);
        // Assert:
        Assert.assertThat(list.asCollection(), IsEquivalent.equivalentTo(rawList));
    }
    //endregion
    //region Serialization
    @Test
    public void canSerializeListWithDefaultLabel() {
        // Assert: both construction paths (add-based and list ctor) serialize under "data".
        assertSerializedData(list -> {
            final SerializableList<MockSerializableEntity> serializableList = new SerializableList<>(10);
            list.forEach(serializableList::add);
            return serializableList;
        }, "data");
        assertSerializedData(SerializableList::new, "data");
    }
    @Test
    public void canSerializeListWithCustomLabel() {
        // Assert: both construction paths serialize under the custom label.
        assertSerializedData(list -> {
            final SerializableList<MockSerializableEntity> serializableList = new SerializableList<>(10, "objects");
            list.forEach(serializableList::add);
            return serializableList;
        }, "objects");
        assertSerializedData(list -> new SerializableList<>(list, "objects"), "objects");
    }
    /**
     * Serializes a two-entity list built by {@code factory} and asserts the JSON
     * contains exactly one array named {@code expectedArrayName} whose elements
     * round-trip back to the original entities.
     */
    private static void assertSerializedData(
        final Function<List<MockSerializableEntity>, SerializableList<MockSerializableEntity>> factory,
        final String expectedArrayName) {
        // Arrange:
        final JsonSerializer serializer = new JsonSerializer();
        final SerializableList<MockSerializableEntity> list1 = factory.apply(
            Arrays.asList(new MockSerializableEntity(5, "foo", 6), new MockSerializableEntity(8, "bar", 7)));
        // Act:
        list1.serialize(serializer);
        // Assert:
        final JSONObject object = serializer.getObject();
        Assert.assertThat(object.size(), IsEqual.equalTo(1));
        final JSONArray dataArray = (JSONArray)object.get(expectedArrayName);
        Assert.assertThat(dataArray.size(), IsEqual.equalTo(2));
        Assert.assertThat(deserializeFromObject(dataArray.get(0)), IsEqual.equalTo(list1.get(0)));
        Assert.assertThat(deserializeFromObject(dataArray.get(1)), IsEqual.equalTo(list1.get(1)));
    }
    @Test
    public void canRoundTripList() {
        // Arrange:
        final SerializableList<MockSerializableEntity> originalList = new SerializableList<>(10);
        originalList.add(new MockSerializableEntity(5, "foo", 6));
        originalList.add(new MockSerializableEntity(8, "bar", 7));
        // Act: serialize then deserialize with the default label.
        final Deserializer deserializer = Utils.roundtripSerializableEntity(originalList, null);
        final SerializableList<MockSerializableEntity> list =
            new SerializableList<>(deserializer, MockSerializableEntity::new);
        // Assert:
        Assert.assertThat(list.asCollection(), IsEquivalent.equivalentTo(originalList.asCollection()));
    }
    @Test
    public void canRoundTripListWithCustomLabel() {
        // Arrange:
        final SerializableList<MockSerializableEntity> originalList = new SerializableList<>(10, "objects");
        originalList.add(new MockSerializableEntity(5, "foo", 6));
        originalList.add(new MockSerializableEntity(8, "bar", 7));
        // Act: the same custom label must be supplied on deserialization.
        final Deserializer deserializer = Utils.roundtripSerializableEntity(originalList, null);
        final SerializableList<MockSerializableEntity> list =
            new SerializableList<>(deserializer, MockSerializableEntity::new, "objects");
        // Assert:
        Assert.assertThat(list.asCollection(), IsEquivalent.equivalentTo(originalList.asCollection()));
    }
    //endregion
    //region hashCode / equals
    @Test
    public void hashCodeIsConsistentForUnchangedList() {
        // Arrange:
        final SerializableList<MockSerializableEntity> list = new SerializableList<>(10);
        final int hashCode = list.hashCode();
        // Assert:
        Assert.assertThat(list.hashCode(), IsEqual.equalTo(hashCode));
    }
    @Test
    public void hashCodeChangesWhenListChanges() {
        // Arrange:
        final SerializableList<MockSerializableEntity> list = new SerializableList<>(10);
        final int hashCode = list.hashCode();
        // Act:
        list.add(new MockSerializableEntity());
        // Assert:
        Assert.assertThat(list.hashCode(), IsNot.not(IsEqual.equalTo(hashCode)));
    }
    @Test
    public void equalsOnlyReturnsTrueForEquivalentObjects() {
        // Arrange: entities2 differs from entities1 only in the last entity.
        final List<MockSerializableEntity> entities1 = Arrays.asList(
            new MockSerializableEntity(5, "foo", 6),
            new MockSerializableEntity(8, "bar", 7));
        final List<MockSerializableEntity> entities2 = Arrays.asList(
            new MockSerializableEntity(5, "foo", 6),
            new MockSerializableEntity(8, "bar", 8));
        final SerializableList<MockSerializableEntity> list1 = new SerializableList<>(entities1);
        final SerializableList<MockSerializableEntity> list2 = new SerializableList<>(entities1);
        final SerializableList<MockSerializableEntity> list3 = new SerializableList<>(entities2);
        // Assert: equal content => equal; different content, raw list, and null are all unequal.
        Assert.assertThat(list2, IsEqual.equalTo(list1));
        Assert.assertThat(list3, IsNot.not(IsEqual.equalTo(list1)));
        Assert.assertThat(entities1, IsNot.not((Object)IsEqual.equalTo(list1)));
        Assert.assertThat(null, IsNot.not(IsEqual.equalTo(list1)));
    }
    //endregion
    /**
     * Asserts equality/inequality symmetrically in both directions and that
     * findFirstDifference reports {@code expectedDifferenceIndex} either way.
     */
    private static void assertListComparison(
        final SerializableList<MockSerializableEntity> list1,
        final SerializableList<MockSerializableEntity> list2,
        final int expectedDifferenceIndex,
        final boolean expectedEquals) {
        // Assert:
        if (expectedEquals) {
            Assert.assertThat(list1, IsEqual.equalTo(list2));
            Assert.assertThat(list2, IsEqual.equalTo(list1));
        } else {
            Assert.assertThat(list1, IsNot.not(IsEqual.equalTo(list2)));
            Assert.assertThat(list2, IsNot.not(IsEqual.equalTo(list1)));
        }
        Assert.assertThat(list1.findFirstDifference(list2), IsEqual.equalTo(expectedDifferenceIndex));
        Assert.assertThat(list2.findFirstDifference(list1), IsEqual.equalTo(expectedDifferenceIndex));
    }
    /** Deserializes a raw JSON object (from the serialized array) back into a mock entity. */
    private static MockSerializableEntity deserializeFromObject(final Object object) {
        return new MockSerializableEntity(new JsonDeserializer((JSONObject)object, null));
    }
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.core;
import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.xcontent.ToXContentObject;
import org.elasticsearch.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.Collections;
import java.util.Map;
import java.util.Objects;
/**
* {@link DataTiersFeatureSetUsage} represents the xpack usage for data tiers.
* This includes things like the number of nodes per tier, indices, shards, etc.
* See {@link TierSpecificStats} for the stats that are tracked on a per-tier
* basis.
*/
public class DataTiersFeatureSetUsage extends XPackFeatureSet.Usage {

    /** Per-tier statistics, keyed by tier name. */
    private final Map<String, TierSpecificStats> tierStats;

    /** Wire deserialization constructor; must mirror {@link #writeTo} exactly. */
    public DataTiersFeatureSetUsage(StreamInput in) throws IOException {
        super(in);
        this.tierStats = in.readMap(StreamInput::readString, TierSpecificStats::new);
    }

    public DataTiersFeatureSetUsage(Map<String, TierSpecificStats> tierStats) {
        // Data tiers are always available and enabled.
        super(XPackField.DATA_TIERS, true, true);
        this.tierStats = tierStats;
    }

    @Override
    public Version getMinimalSupportedVersion() {
        return Version.V_7_10_0;
    }

    /** Returns an unmodifiable view of the per-tier statistics. */
    public Map<String, TierSpecificStats> getTierStats() {
        return Collections.unmodifiableMap(tierStats);
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        out.writeMap(tierStats, StreamOutput::writeString, (o, v) -> v.writeTo(o));
    }

    @Override
    protected void innerXContent(XContentBuilder builder, Params params) throws IOException {
        super.innerXContent(builder, params);
        // Emit one field per tier, named after the tier.
        for (Map.Entry<String, TierSpecificStats> entry : tierStats.entrySet()) {
            builder.field(entry.getKey(), entry.getValue());
        }
    }

    @Override
    public int hashCode() {
        return Objects.hash(tierStats);
    }

    @Override
    public boolean equals(Object obj) {
        // Reference-equality fast path (standard Object.equals idiom).
        if (this == obj) {
            return true;
        }
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        DataTiersFeatureSetUsage other = (DataTiersFeatureSetUsage) obj;
        return Objects.equals(available, other.available)
            && Objects.equals(enabled, other.enabled)
            && Objects.equals(tierStats, other.tierStats);
    }

    @Override
    public String toString() {
        return Strings.toString(this);
    }

    /**
     * {@link TierSpecificStats} represents statistics about nodes in a single
     * tier, for example, how many nodes there are, the index count, shard
     * count, etc.
     */
    public static class TierSpecificStats implements Writeable, ToXContentObject {

        public final int nodeCount;
        public final int indexCount;
        public final int totalShardCount;
        public final int primaryShardCount;
        public final long docCount;
        public final long totalByteCount;
        public final long primaryByteCount;
        // Median of primary shard sizes in bytes.
        public final long primaryByteCountMedian;
        // Median absolute deviation of primary shard sizes in bytes.
        public final long primaryShardBytesMAD;

        /** Wire deserialization constructor; read order must mirror {@link #writeTo}. */
        public TierSpecificStats(StreamInput in) throws IOException {
            this.nodeCount = in.readVInt();
            this.indexCount = in.readVInt();
            this.totalShardCount = in.readVInt();
            this.primaryShardCount = in.readVInt();
            this.docCount = in.readVLong();
            this.totalByteCount = in.readVLong();
            this.primaryByteCount = in.readVLong();
            this.primaryByteCountMedian = in.readVLong();
            this.primaryShardBytesMAD = in.readVLong();
        }

        public TierSpecificStats(
            int nodeCount,
            int indexCount,
            int totalShardCount,
            int primaryShardCount,
            long docCount,
            long totalByteCount,
            long primaryByteCount,
            long primaryByteCountMedian,
            long primaryShardBytesMAD
        ) {
            this.nodeCount = nodeCount;
            this.indexCount = indexCount;
            this.totalShardCount = totalShardCount;
            this.primaryShardCount = primaryShardCount;
            this.docCount = docCount;
            this.totalByteCount = totalByteCount;
            this.primaryByteCount = primaryByteCount;
            this.primaryByteCountMedian = primaryByteCountMedian;
            this.primaryShardBytesMAD = primaryShardBytesMAD;
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            // Write order must mirror the StreamInput constructor above.
            out.writeVInt(this.nodeCount);
            out.writeVInt(this.indexCount);
            out.writeVInt(this.totalShardCount);
            out.writeVInt(this.primaryShardCount);
            out.writeVLong(this.docCount);
            out.writeVLong(this.totalByteCount);
            out.writeVLong(this.primaryByteCount);
            out.writeVLong(this.primaryByteCountMedian);
            out.writeVLong(this.primaryShardBytesMAD);
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            builder.field("node_count", nodeCount);
            builder.field("index_count", indexCount);
            builder.field("total_shard_count", totalShardCount);
            builder.field("primary_shard_count", primaryShardCount);
            builder.field("doc_count", docCount);
            builder.humanReadableField("total_size_bytes", "total_size", new ByteSizeValue(totalByteCount));
            builder.humanReadableField("primary_size_bytes", "primary_size", new ByteSizeValue(primaryByteCount));
            builder.humanReadableField(
                "primary_shard_size_avg_bytes",
                "primary_shard_size_avg",
                // Guard against division by zero when there are no primary shards.
                new ByteSizeValue(primaryShardCount == 0 ? 0 : (primaryByteCount / primaryShardCount))
            );
            builder.humanReadableField(
                "primary_shard_size_median_bytes",
                "primary_shard_size_median",
                new ByteSizeValue(primaryByteCountMedian)
            );
            builder.humanReadableField("primary_shard_size_mad_bytes", "primary_shard_size_mad", new ByteSizeValue(primaryShardBytesMAD));
            builder.endObject();
            return builder;
        }

        @Override
        public int hashCode() {
            return Objects.hash(
                this.nodeCount,
                this.indexCount,
                this.totalShardCount,
                this.primaryShardCount,
                this.totalByteCount,
                this.primaryByteCount,
                this.docCount,
                this.primaryByteCountMedian,
                this.primaryShardBytesMAD
            );
        }

        @Override
        public boolean equals(Object obj) {
            // Reference-equality fast path (standard Object.equals idiom).
            if (this == obj) {
                return true;
            }
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }
            TierSpecificStats other = (TierSpecificStats) obj;
            return nodeCount == other.nodeCount
                && indexCount == other.indexCount
                && totalShardCount == other.totalShardCount
                && primaryShardCount == other.primaryShardCount
                && docCount == other.docCount
                && totalByteCount == other.totalByteCount
                && primaryByteCount == other.primaryByteCount
                && primaryByteCountMedian == other.primaryByteCountMedian
                && primaryShardBytesMAD == other.primaryShardBytesMAD;
        }

        @Override
        public String toString() {
            return Strings.toString(this);
        }
    }
}
| |
package org.ow2.chameleon.fuchsia.core.component;
/*
* #%L
* OW2 Chameleon - Fuchsia Core
* %%
* Copyright (C) 2009 - 2014 OW2 Chameleon
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import org.apache.felix.ipojo.annotations.*;
import org.osgi.framework.BundleContext;
import org.osgi.framework.Filter;
import org.osgi.framework.ServiceReference;
import org.ow2.chameleon.fuchsia.core.FuchsiaConstants;
import org.ow2.chameleon.fuchsia.core.component.manager.LinkerBinderManager;
import org.ow2.chameleon.fuchsia.core.component.manager.LinkerDeclarationsManager;
import org.ow2.chameleon.fuchsia.core.component.manager.LinkerManagement;
import org.ow2.chameleon.fuchsia.core.declaration.ExportDeclaration;
import org.ow2.chameleon.fuchsia.core.exceptions.InvalidFilterException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import static org.apache.felix.ipojo.Factory.INSTANCE_NAME_PROPERTY;
import static org.ow2.chameleon.fuchsia.core.FuchsiaUtils.getFilter;
import static org.ow2.chameleon.fuchsia.core.component.manager.DeclarationBinder.TARGET_FILTER_PROPERTY;
/**
* The {@link DefaultExportationLinker} component is the default implementation of the interface.
* {@link ExportationLinker}.
* <p/>
* The {@link DefaultExportationLinker} component take as mandatory ServiceProperty a filter on the
* {@link ExportDeclaration} named {@literal {@link #FILTER_EXPORTDECLARATION_PROPERTY }} and a filter on
* {@link ExporterService} named {@literal {@link #FILTER_EXPORTERSERVICE_PROPERTY }}.
* <p/>
* The filters are String with the LDAP syntax OR {@link org.osgi.framework.Filter}.
*
* @author Morgan Martinet
*/
@Component(name = FuchsiaConstants.DEFAULT_EXPORTATION_LINKER_FACTORY_NAME)
@Provides(specifications = {ExportationLinker.class, ExportationLinkerIntrospection.class})
public class DefaultExportationLinker implements ExportationLinker, ExportationLinkerIntrospection {

    /**
     * Logger.
     */
    private static final Logger LOG = LoggerFactory.getLogger(DefaultExportationLinker.class);

    // The OSGi BundleContext, injected by OSGi in the constructor
    private final BundleContext bundleContext;

    // Guards every structural change to the binder/declaration managers.
    private final Object lock = new Object();

    private final LinkerManagement<ExportDeclaration, ExporterService> linkerManagement;
    private final LinkerBinderManager<ExportDeclaration, ExporterService> exportersManager;
    private final LinkerDeclarationsManager<ExportDeclaration, ExporterService> declarationsManager;

    // iPOJO lifecycle controller: setting this to false invalidates the instance.
    @Controller
    private boolean state;

    @ServiceProperty(name = INSTANCE_NAME_PROPERTY)
    private String linkerName;

    @ServiceProperty(name = FILTER_EXPORTDECLARATION_PROPERTY, mandatory = true)
    @Property(name = FILTER_EXPORTDECLARATION_PROPERTY, mandatory = true)
    private Object exportDeclarationFilterProperty;
    private Filter exportDeclarationFilter;

    @ServiceProperty(name = FILTER_EXPORTERSERVICE_PROPERTY, mandatory = true)
    @Property(name = FILTER_EXPORTERSERVICE_PROPERTY, mandatory = true)
    private Object exporterServiceFilterProperty;
    private Filter exporterServiceFilter;

    public DefaultExportationLinker(BundleContext context) {
        this.bundleContext = context;
        processProperties();
        linkerManagement = new LinkerManagement<ExportDeclaration, ExporterService>(bundleContext, exporterServiceFilter, exportDeclarationFilter);
        exportersManager = linkerManagement.getBindersManager();
        declarationsManager = linkerManagement.getDeclarationsManager();
    }

    @Validate
    public void start() {
        // Parameterized logging: the message is only built when DEBUG is enabled.
        LOG.debug("ExportationLinker {} starting", linkerName);
    }

    @Invalidate
    public void stop() {
        LOG.debug("ExportationLinker {} stopping", linkerName);
    }

    /**
     * Get the filters ExporterServiceFilter and ExportDeclarationFilter from the properties, stop the instance if one of.
     * them is invalid.
     */
    private void processProperties() {
        state = true;
        try {
            exporterServiceFilter = getFilter(exporterServiceFilterProperty);
        } catch (InvalidFilterException invalidFilterException) {
            LOG.debug("The value of the Property {} is invalid,"
                    + " the recuperation of the Filter has failed. The instance gonna stop.",
                    FILTER_EXPORTERSERVICE_PROPERTY, invalidFilterException);
            state = false;
            return;
        }
        try {
            exportDeclarationFilter = getFilter(exportDeclarationFilterProperty);
        } catch (InvalidFilterException invalidFilterException) {
            LOG.debug("The value of the Property {} is invalid,"
                    + " the recuperation of the Filter has failed. The instance gonna stop.",
                    FILTER_EXPORTDECLARATION_PROPERTY, invalidFilterException);
            state = false;
            // No trailing return needed: this is the end of the method.
        }
    }

    /**
     * Called by iPOJO when the configuration of the DefaultExportationLinker is updated.
     * <p/>
     * Call #processProperties() to get the updated filters ExporterServiceFilter and ExportDeclarationFilter.
     * Compute and apply the changes in the links relatives to the changes in the filters.
     */
    @Updated
    public void updated() {
        processProperties();
        synchronized (lock) {
            exportersManager.applyFilterChanges(exporterServiceFilter);
            declarationsManager.applyFilterChanges(exportDeclarationFilter);
        }
    }

    /**
     * Bind the {@link ExporterService} matching the exporterServiceFilter.
     * <p/>
     * Check all the already bound {@link ExportDeclaration}s, if the metadata of the ExportDeclaration match the filter
     * exposed by the ExporterService, link them together.
     */
    @Bind(id = "exporterServices", specification = ExporterService.class, aggregate = true, optional = true)
    void bindExporterService(ServiceReference<ExporterService> serviceReference) {
        synchronized (lock) {
            try {
                exportersManager.add(serviceReference);
            } catch (InvalidFilterException invalidFilterException) {
                LOG.error("The ServiceProperty \"{}\" of the ExporterService {} doesn't provides a valid Filter."
                        + " To be used, it must provides a correct \"{}\" ServiceProperty.",
                        TARGET_FILTER_PROPERTY, bundleContext.getService(serviceReference), TARGET_FILTER_PROPERTY,
                        invalidFilterException
                );
                return;
            }
            if (!exportersManager.matched(serviceReference)) {
                return;
            }
            LOG.debug("{} : Bind the ExporterService {} with filter {}",
                    linkerName,
                    exportersManager.getDeclarationBinder(serviceReference),
                    exportersManager.getTargetFilter(serviceReference));
            exportersManager.createLinks(serviceReference);
        }
    }

    /**
     * Update the Target Filter of the ExporterService.
     * Apply the induce modifications on the links of the ExporterService
     *
     * @param serviceReference the modified ExporterService reference
     */
    @Modified(id = "exporterServices")
    void modifiedExporterService(ServiceReference<ExporterService> serviceReference) {
        // FIX: hold the lock here, consistently with bindExporterService /
        // unbindExporterService / modifiedExportDeclaration — the previous
        // version mutated exportersManager without synchronization.
        synchronized (lock) {
            try {
                exportersManager.modified(serviceReference);
            } catch (InvalidFilterException invalidFilterException) {
                LOG.error("The ServiceProperty \"{}\" of the ExporterService {} doesn't provides a valid Filter."
                        + " To be used, it must provides a correct \"{}\" ServiceProperty.",
                        TARGET_FILTER_PROPERTY, bundleContext.getService(serviceReference), TARGET_FILTER_PROPERTY,
                        invalidFilterException
                );
                exportersManager.removeLinks(serviceReference);
                return;
            }
            if (exportersManager.matched(serviceReference)) {
                exportersManager.updateLinks(serviceReference);
            } else {
                exportersManager.removeLinks(serviceReference);
            }
        }
    }

    /**
     * Unbind the {@link ExporterService}.
     */
    @Unbind(id = "exporterServices")
    void unbindExporterService(ServiceReference<ExporterService> serviceReference) {
        LOG.debug("{} : Unbind the ExporterService {}", linkerName, exportersManager.getDeclarationBinder(serviceReference));
        synchronized (lock) {
            exportersManager.removeLinks(serviceReference);
            exportersManager.remove(serviceReference);
        }
    }

    /**
     * Bind the {@link ExportDeclaration} matching the filter ExportDeclarationFilter.
     * <p/>
     * Check if metadata of the ExportDeclaration match the filter exposed by the {@link ExporterService}s bound.
     * If the ExportDeclaration matches the ExporterService filter, link them together.
     */
    @Bind(id = "exportDeclarations", specification = ExportDeclaration.class, aggregate = true, optional = true)
    void bindExportDeclaration(ServiceReference<ExportDeclaration> exportDeclarationSRef) {
        synchronized (lock) {
            declarationsManager.add(exportDeclarationSRef);
            LOG.debug("{} : Bind the ExportDeclaration {}", linkerName, declarationsManager.getDeclaration(exportDeclarationSRef));
            if (!declarationsManager.matched(exportDeclarationSRef)) {
                return;
            }
            declarationsManager.createLinks(exportDeclarationSRef);
        }
    }

    /**
     * Unbind and bind the {@link ExportDeclaration}.
     */
    @Modified(id = "exportDeclarations")
    void modifiedExportDeclaration(ServiceReference<ExportDeclaration> exportDeclarationSRef) {
        LOG.debug("{} : Modify the ExportDeclaration {}", linkerName, declarationsManager.getDeclaration(exportDeclarationSRef));
        synchronized (lock) {
            // Tear down existing links, re-evaluate the declaration, then relink if it still matches.
            declarationsManager.removeLinks(exportDeclarationSRef);
            declarationsManager.modified(exportDeclarationSRef);
            if (!declarationsManager.matched(exportDeclarationSRef)) {
                return;
            }
            declarationsManager.createLinks(exportDeclarationSRef);
        }
    }

    /**
     * Unbind the {@link ExportDeclaration}.
     */
    @Unbind(id = "exportDeclarations")
    void unbindExportDeclaration(ServiceReference<ExportDeclaration> exportDeclarationSRef) {
        LOG.debug("{} : Unbind the ExportDeclaration {}", linkerName, declarationsManager.getDeclaration(exportDeclarationSRef));
        synchronized (lock) {
            declarationsManager.removeLinks(exportDeclarationSRef);
            declarationsManager.remove(exportDeclarationSRef);
        }
    }

    public String getName() {
        return linkerName;
    }

    /**
     * Return the exporterServices linked this DefaultExportationLinker.
     *
     * @return The exporterServices linked to this DefaultExportationLinker
     */
    public Set<ExporterService> getLinkedExporters() {
        return exportersManager.getMatchedDeclarationBinder();
    }

    /**
     * Return the exportDeclarations bind by this DefaultExportationLinker.
     *
     * @return The exportDeclarations bind by this DefaultExportationLinker
     */
    public Set<ExportDeclaration> getExportDeclarations() {
        return declarationsManager.getMatchedDeclaration();
    }
}
| |
/*
* Copyright (C) 2014-2022 Philip Helger (www.helger.com)
* philip[at]helger[dot]com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.helger.xml.serialize.write;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import org.junit.Ignore;
import org.junit.Test;
import com.helger.commons.charset.CharsetHelper;
import com.helger.commons.mock.CommonsAssert;
import com.helger.commons.mock.CommonsTestHelper;
import com.helger.commons.string.StringHelper;
import com.helger.commons.system.ENewLineMode;
import com.helger.xml.namespace.MapBasedNamespaceContext;
/**
* Test class for class {@link XMLWriterSettings}.
*
* @author Philip Helger
*/
public final class XMLWriterSettingsTest
{
  // Both boolean values — used to iterate every flag permutation in testPermutations.
  private static final boolean [] BOOLS = new boolean [] { true, false };

  /**
   * Verify all defaults of {@link XMLWriterSettings}, both on the shared
   * {@code DEFAULT_XML_SETTINGS} singleton and on a freshly constructed
   * instance, then verify equals/hashCode against objects that differ in
   * exactly one property.
   */
  @Test
  public void testDefault ()
  {
    IXMLWriterSettings mws = XMLWriterSettings.DEFAULT_XML_SETTINGS;
    assertEquals (EXMLSerializeXMLDeclaration.EMIT, mws.getSerializeXMLDeclaration ());
    assertEquals (EXMLSerializeDocType.EMIT, mws.getSerializeDocType ());
    assertTrue (mws.isNewLineAfterXMLDeclaration ());
    assertEquals (EXMLSerializeComments.EMIT, mws.getSerializeComments ());
    assertEquals (XMLWriterSettings.DEFAULT_XML_CHARSET, mws.getCharset ().name ());
    assertEquals (EXMLSerializeIndent.INDENT_AND_ALIGN, mws.getIndent ());
    assertEquals (StandardCharsets.UTF_8, mws.getCharset ());
    assertTrue (mws.isSpaceOnSelfClosedElement ());
    assertTrue (mws.isUseDoubleQuotesForAttributes ());
    assertEquals (ENewLineMode.DEFAULT, mws.getNewLineMode ());
    assertEquals (ENewLineMode.DEFAULT.getText (), mws.getNewLineString ());
    assertEquals ("  ", mws.getIndentationString ());
    assertTrue (mws.isEmitNamespaces ());
    assertFalse (mws.isPutNamespaceContextPrefixesInRoot ());
    // A freshly created instance must expose the same defaults.
    // NOTE(review): isNewLineAfterXMLDeclaration, new-line mode and the
    // indentation string are not re-checked in this second batch — presumably
    // an oversight; confirm and extend if desired.
    mws = new XMLWriterSettings ();
    assertEquals (EXMLSerializeXMLDeclaration.EMIT, mws.getSerializeXMLDeclaration ());
    assertEquals (EXMLSerializeDocType.EMIT, mws.getSerializeDocType ());
    assertEquals (EXMLSerializeComments.EMIT, mws.getSerializeComments ());
    assertEquals (XMLWriterSettings.DEFAULT_XML_CHARSET, mws.getCharset ().name ());
    assertEquals (EXMLSerializeIndent.INDENT_AND_ALIGN, mws.getIndent ());
    assertEquals (StandardCharsets.UTF_8, mws.getCharset ());
    assertTrue (mws.isSpaceOnSelfClosedElement ());
    assertTrue (mws.isUseDoubleQuotesForAttributes ());
    assertTrue (mws.isEmitNamespaces ());
    assertFalse (mws.isPutNamespaceContextPrefixesInRoot ());
    // equals/hashCode contract: equal content, then one differing property each.
    CommonsTestHelper.testDefaultImplementationWithEqualContentObject (mws, new XMLWriterSettings ());
    CommonsTestHelper.testDefaultImplementationWithDifferentContentObject (mws,
                                                                           new XMLWriterSettings ().setSerializeXMLDeclaration (EXMLSerializeXMLDeclaration.IGNORE));
    CommonsTestHelper.testDefaultImplementationWithDifferentContentObject (mws,
                                                                           new XMLWriterSettings ().setSerializeDocType (EXMLSerializeDocType.IGNORE));
    CommonsTestHelper.testDefaultImplementationWithDifferentContentObject (mws,
                                                                           new XMLWriterSettings ().setNewLineAfterXMLDeclaration (false));
    CommonsTestHelper.testDefaultImplementationWithDifferentContentObject (mws,
                                                                           new XMLWriterSettings ().setSerializeComments (EXMLSerializeComments.IGNORE));
    CommonsTestHelper.testDefaultImplementationWithDifferentContentObject (mws,
                                                                           new XMLWriterSettings ().setIndent (EXMLSerializeIndent.NONE));
    CommonsTestHelper.testDefaultImplementationWithDifferentContentObject (mws,
                                                                           new XMLWriterSettings ().setCharset (StandardCharsets.US_ASCII));
    CommonsTestHelper.testDefaultImplementationWithDifferentContentObject (mws,
                                                                           new XMLWriterSettings ().setNamespaceContext (new MapBasedNamespaceContext ().addMapping ("prefix",
                                                                                                                                                                      "uri")));
    CommonsTestHelper.testDefaultImplementationWithDifferentContentObject (mws,
                                                                           new XMLWriterSettings ().setSpaceOnSelfClosedElement (false));
    CommonsTestHelper.testDefaultImplementationWithDifferentContentObject (mws,
                                                                           new XMLWriterSettings ().setUseDoubleQuotesForAttributes (false));
    // Pick whichever new-line mode is NOT the platform default so the objects differ.
    CommonsTestHelper.testDefaultImplementationWithDifferentContentObject (mws,
                                                                           new XMLWriterSettings ().setNewLineMode (ENewLineMode.DEFAULT == ENewLineMode.WINDOWS ? ENewLineMode.UNIX
                                                                                                                                                                 : ENewLineMode.WINDOWS));
    CommonsTestHelper.testDefaultImplementationWithDifferentContentObject (mws, new XMLWriterSettings ().setIndentationString ("\t"));
    CommonsTestHelper.testDefaultImplementationWithDifferentContentObject (mws, new XMLWriterSettings ().setEmitNamespaces (false));
    CommonsTestHelper.testDefaultImplementationWithDifferentContentObject (mws,
                                                                           new XMLWriterSettings ().setPutNamespaceContextPrefixesInRoot (true));
  }

  /**
   * Exhaustively iterate every combination of settings: each setter is
   * immediately verified by its getter, an equal-content object is built from
   * scratch in the innermost loop, and each getter is re-asserted after its
   * enclosing loop to prove the inner loops did not clobber outer state.
   * Disabled by default because the full cross product takes very long.
   */
  @Test
  @Ignore ("Tested and works but takes too long")
  public void testPermutations ()
  {
    // Now try all permutations
    final XMLWriterSettings aXWS = new XMLWriterSettings ();
    for (final EXMLSerializeXMLDeclaration eXMLDecl : EXMLSerializeXMLDeclaration.values ())
    {
      aXWS.setSerializeXMLDeclaration (eXMLDecl);
      assertEquals (eXMLDecl, aXWS.getSerializeXMLDeclaration ());
      for (final EXMLSerializeDocType eDocType : EXMLSerializeDocType.values ())
      {
        aXWS.setSerializeDocType (eDocType);
        assertEquals (eDocType, aXWS.getSerializeDocType ());
        for (final boolean bNewLineAfterXMLDecl : BOOLS)
        {
          aXWS.setNewLineAfterXMLDeclaration (bNewLineAfterXMLDecl);
          CommonsAssert.assertEquals (bNewLineAfterXMLDecl, aXWS.isNewLineAfterXMLDeclaration ());
          for (final EXMLSerializeComments eComments : EXMLSerializeComments.values ())
          {
            aXWS.setSerializeComments (eComments);
            assertEquals (eComments, aXWS.getSerializeComments ());
            for (final EXMLSerializeIndent eIndent : EXMLSerializeIndent.values ())
            {
              aXWS.setIndent (eIndent);
              assertEquals (eIndent, aXWS.getIndent ());
              for (final EXMLIncorrectCharacterHandling eIncorrectCharHandling : EXMLIncorrectCharacterHandling.values ())
              {
                aXWS.setIncorrectCharacterHandling (eIncorrectCharHandling);
                assertEquals (eIncorrectCharHandling, aXWS.getIncorrectCharacterHandling ());
                for (final Charset aCS : CharsetHelper.getAllCharsets ().values ())
                {
                  aXWS.setCharset (aCS);
                  assertEquals (aCS, aXWS.getCharset ());
                  assertEquals (aCS.name (), aXWS.getCharset ().name ());
                  for (final boolean bUseDoubleQuotesForAttributes : BOOLS)
                  {
                    aXWS.setUseDoubleQuotesForAttributes (bUseDoubleQuotesForAttributes);
                    CommonsAssert.assertEquals (bUseDoubleQuotesForAttributes, aXWS.isUseDoubleQuotesForAttributes ());
                    for (final boolean bSpaceOnSelfClosedElement : BOOLS)
                    {
                      aXWS.setSpaceOnSelfClosedElement (bSpaceOnSelfClosedElement);
                      CommonsAssert.assertEquals (bSpaceOnSelfClosedElement, aXWS.isSpaceOnSelfClosedElement ());
                      for (final ENewLineMode eNewlineMode : ENewLineMode.values ())
                      {
                        aXWS.setNewLineMode (eNewlineMode);
                        assertEquals (eNewlineMode, aXWS.getNewLineMode ());
                        assertTrue (StringHelper.hasText (aXWS.getNewLineString ()));
                        for (final String sIndentation : new String [] { "\t", "  " })
                        {
                          aXWS.setIndentationString (sIndentation);
                          assertEquals (sIndentation, aXWS.getIndentationString ());
                          for (final boolean bEmitNamespaces : BOOLS)
                          {
                            aXWS.setEmitNamespaces (bEmitNamespaces);
                            CommonsAssert.assertEquals (bEmitNamespaces, aXWS.isEmitNamespaces ());
                            for (final boolean bPutNamespaceContextPrefixesInRoot : BOOLS)
                            {
                              aXWS.setPutNamespaceContextPrefixesInRoot (bPutNamespaceContextPrefixesInRoot);
                              CommonsAssert.assertEquals (bPutNamespaceContextPrefixesInRoot, aXWS.isPutNamespaceContextPrefixesInRoot ());
                              // Build an equal-content object via the fluent API and
                              // verify equals/hashCode symmetry against the mutated one.
                              final XMLWriterSettings aXWS2 = new XMLWriterSettings ().setSerializeXMLDeclaration (eXMLDecl)
                                                                                     .setSerializeDocType (eDocType)
                                                                                     .setSerializeComments (eComments)
                                                                                     .setIndent (eIndent)
                                                                                     .setIncorrectCharacterHandling (eIncorrectCharHandling)
                                                                                     .setCharset (aCS)
                                                                                     .setUseDoubleQuotesForAttributes (bUseDoubleQuotesForAttributes)
                                                                                     .setSpaceOnSelfClosedElement (bSpaceOnSelfClosedElement)
                                                                                     .setNewLineMode (eNewlineMode)
                                                                                     .setIndentationString (sIndentation)
                                                                                     .setEmitNamespaces (bEmitNamespaces)
                                                                                     .setPutNamespaceContextPrefixesInRoot (bPutNamespaceContextPrefixesInRoot);
                              CommonsTestHelper.testEqualsImplementationWithEqualContentObject (aXWS, aXWS2);
                              CommonsTestHelper.testHashcodeImplementationWithEqualContentObject (aXWS, aXWS2);
                              // Main time is spent in the "toString" calls - so
                              // don't test it in the loop
                            }
                            CommonsAssert.assertEquals (bEmitNamespaces, aXWS.isEmitNamespaces ());
                          }
                          assertEquals (sIndentation, aXWS.getIndentationString ());
                        }
                        assertEquals (eNewlineMode, aXWS.getNewLineMode ());
                      }
                      CommonsAssert.assertEquals (bSpaceOnSelfClosedElement, aXWS.isSpaceOnSelfClosedElement ());
                    }
                    CommonsAssert.assertEquals (bUseDoubleQuotesForAttributes, aXWS.isUseDoubleQuotesForAttributes ());
                  }
                  assertEquals (aCS, aXWS.getCharset ());
                  assertEquals (aCS.name (), aXWS.getCharset ().name ());
                }
                assertEquals (eIncorrectCharHandling, aXWS.getIncorrectCharacterHandling ());
              }
              assertEquals (eIndent, aXWS.getIndent ());
            }
            assertEquals (eComments, aXWS.getSerializeComments ());
          }
          CommonsAssert.assertEquals (bNewLineAfterXMLDecl, aXWS.isNewLineAfterXMLDeclaration ());
        }
        assertEquals (eDocType, aXWS.getSerializeDocType ());
      }
      assertEquals (eXMLDecl, aXWS.getSerializeXMLDeclaration ());
    }
  }

  /**
   * Every enum/charset setter must reject <code>null</code> with a
   * {@link NullPointerException}.
   */
  @Test
  public void testNullParams ()
  {
    try
    {
      new XMLWriterSettings ().setSerializeVersion (null);
      fail ();
    }
    catch (final NullPointerException ex)
    {}
    try
    {
      new XMLWriterSettings ().setSerializeXMLDeclaration (null);
      fail ();
    }
    catch (final NullPointerException ex)
    {}
    try
    {
      new XMLWriterSettings ().setSerializeDocType (null);
      fail ();
    }
    catch (final NullPointerException ex)
    {}
    try
    {
      new XMLWriterSettings ().setSerializeComments (null);
      fail ();
    }
    catch (final NullPointerException ex)
    {}
    try
    {
      // Cast disambiguates the overload (Charset vs. String)
      new XMLWriterSettings ().setCharset ((Charset) null);
      fail ();
    }
    catch (final NullPointerException ex)
    {}
    try
    {
      new XMLWriterSettings ().setIndent (null);
      fail ();
    }
    catch (final NullPointerException ex)
    {}
  }
}
| |
package org.reactivecouchbase.concurrent.test;
import org.junit.Assert;
import org.junit.Test;
import org.reactivecouchbase.common.Duration;
import org.reactivecouchbase.concurrent.Await;
import org.reactivecouchbase.concurrent.Future;
import org.reactivecouchbase.concurrent.NamedExecutors;
import org.reactivecouchbase.concurrent.Promise;
import org.reactivecouchbase.functional.Try;
import org.reactivecouchbase.functional.Unit;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;
public class ConcurrentTest {
    /** Executor that runs all asynchronous callbacks in these tests. */
    public static ExecutorService ec = NamedExecutors.newFixedThreadPool(Runtime.getRuntime().availableProcessors() + 1, "ConcurrentTestEC");

    /**
     * Scheduler used by the timeout/sequence tests.
     * NOTE(review): the field is declared as ScheduledExecutorService but built via
     * newFixedThreadPool — confirm NamedExecutors returns a scheduled pool for this
     * overload. Neither executor is ever shut down; tolerable in a short-lived test
     * JVM but an @AfterClass shutdown would be cleaner.
     */
    public static ScheduledExecutorService sched = NamedExecutors.newFixedThreadPool(Runtime.getRuntime().availableProcessors() + 1, "ConcurrentTestScheduledEC");

    /**
     * Smoke-test the pool factories and every Await variant: each call must
     * complete without throwing for an already-successful future.
     */
    @Test
    public void testAwait() {
        NamedExecutors.newCachedThreadPool("Hello");
        NamedExecutors.newFixedThreadPool(2, "Hello");
        NamedExecutors.newSingleThreadPool("Hello");
        Future<String> fu = Future.successful("Hello");
        Await.result(fu, Duration.parse("2 s"));
        Await.result(fu, 2L, TimeUnit.SECONDS);
        Await.resultForever(fu);
        Await.resultForeverOr(fu, "Goodbye");
        Await.resultOr(fu, "Goodbye", Duration.parse("2 s"));
        Await.resultOr(fu, "Goodbye", 2L, TimeUnit.SECONDS);
    }

    /**
     * Chain every combinator on a successful future; each stage counts the
     * latch down once, so all 9 stages must have fired for the final assert.
     */
    @Test
    public void testFuture() throws Exception {
        final CountDownLatch latch = new CountDownLatch(9);
        Future<Unit> future = Future.async(() -> {
            System.out.println("Async 1");
            try {
                Thread.sleep(1000);
            } catch (Exception e) {
                e.printStackTrace();
            }
            latch.countDown();
        }, ec).map(aVoid -> {
            System.out.println("Map 1");
            latch.countDown();
            return null;
        }, ec).map(aVoid -> {
            System.out.println("Map 2");
            latch.countDown();
            return null;
        }, ec).filter(aVoid -> {
            System.out.println("Filter");
            latch.countDown();
            return true;
        }, ec).filterNot(aVoid -> {
            System.out.println("FilterNot");
            latch.countDown();
            return false;
        }, ec).flatMap(aVoid -> {
            System.out.println("Flatmap");
            latch.countDown();
            return Future.async(() -> {
                System.out.println("Async 2");
                latch.countDown();
            }, ec);
        }, ec).andThen(voidTry -> {
            System.out.println("andThen");
            latch.countDown();
        }, ec);
        future.foreach(o -> {
            System.out.println("Foreach");
            latch.countDown();
            return null;
        }, ec);
        latch.await(2, TimeUnit.SECONDS);
        Assert.assertEquals(0, latch.getCount());
    }

    /**
     * Verify the error path: a failed future must skip map() (errorlatch stays
     * at 1) while onError/recover/recoverWith all fire (latch reaches 0).
     */
    @Test
    public void testFutureError() throws Exception {
        final CountDownLatch latch = new CountDownLatch(6);
        final CountDownLatch errorlatch = new CountDownLatch(1);
        Future<Void> future = Future.async(() -> {
            try {
                Thread.sleep(1000);
            } catch (Exception e) {
                e.printStackTrace();
            }
            latch.countDown();
            throw new RuntimeException("Damn it !!!");
        }, ec).map(aVoid -> {
            // Must NOT run — the upstream future failed.
            errorlatch.countDown();
            return null;
        }, ec).recover((Function<Throwable, Void>) throwable -> {
            latch.countDown();
            return null;
        }, ec);
        future.onError(throwable -> {
            latch.countDown();
        }, ec);
        future.recoverWith(throwable -> {
            latch.countDown();
            return Future.async(() -> {
                latch.countDown();
                return null;
            }, ec);
        }, ec).map((Function<Void, Void>) aVoid -> {
            latch.countDown();
            return null;
        }, ec);
        latch.await(2, TimeUnit.SECONDS);
        errorlatch.await(2, TimeUnit.SECONDS);
        Assert.assertEquals(0, latch.getCount());
        Assert.assertEquals(1, errorlatch.getCount());
    }

    /**
     * Same combinator chain as {@link #testFuture()} but starting from an
     * already-completed Promise; all 8 stages must fire.
     */
    @Test
    public void testPromise() throws Exception {
        final CountDownLatch latch = new CountDownLatch(8);
        Future<Unit> future = Promise.<Void>successful(null).future().map(aVoid -> {
            System.out.println("Map 1");
            latch.countDown();
            return null;
        }, ec).map(aVoid -> {
            System.out.println("Map 2");
            latch.countDown();
            return null;
        }, ec).filter(aVoid -> {
            System.out.println("Filter");
            latch.countDown();
            return true;
        }, ec).filterNot(aVoid -> {
            System.out.println("FilterNot");
            latch.countDown();
            return false;
        }, ec).flatMap(aVoid -> {
            System.out.println("Flatmap");
            latch.countDown();
            return Future.async(() -> {
                System.out.println("Async 2");
                latch.countDown();
            }, ec);
        }, ec).andThen(voidTry -> {
            System.out.println("andThen");
            latch.countDown();
        }, ec);
        future.foreach(o -> {
            System.out.println("Foreach");
            latch.countDown();
            return null;
        }, ec);
        latch.await(2, TimeUnit.SECONDS);
        Assert.assertEquals(0, latch.getCount());
    }

    /**
     * Error path for a failed Promise: map() is skipped (errorlatch stays 1)
     * while onError/recover/recoverWith fire (latch reaches 0).
     */
    @Test
    public void testPromiseError() throws Exception {
        final CountDownLatch latch = new CountDownLatch(5);
        final CountDownLatch errorlatch = new CountDownLatch(1);
        Future<Void> future = Promise.<Void>failed(new RuntimeException("Damn it !!!")).future().map(new Function<Void, Void>() {
            @Override
            public Void apply(java.lang.Void aVoid) {
                // Must NOT run — the promise was failed.
                errorlatch.countDown();
                return null;
            }
        }, ec).recover((Function<Throwable, Void>) throwable -> {
            latch.countDown();
            return null;
        }, ec);
        future.onError(throwable -> {
            latch.countDown();
        }, ec);
        future.recoverWith(throwable -> {
            latch.countDown();
            return Future.async(() -> {
                latch.countDown();
                return null;
            }, ec);
        }, ec).map(v -> {
            latch.countDown();
            return null;
        }, ec);
        latch.await(2, TimeUnit.SECONDS);
        errorlatch.await(2, TimeUnit.SECONDS);
        Assert.assertEquals(0, latch.getCount());
        Assert.assertEquals(1, errorlatch.getCount());
    }

    /**
     * Build a mutable list from varargs.
     * {@code @SafeVarargs} suppresses the heap-pollution warning at each call
     * site; safe because the varargs array is only read, never stored.
     *
     * @param items elements to copy, in order
     * @param <T>   element type
     * @return a new mutable ArrayList containing the given items
     */
    @SafeVarargs
    public static <T> List<T> newArrayList(T... items) {
        List<T> list = new ArrayList<>();
        for (T item : items) {
            list.add(item);
        }
        return list;
    }

    /**
     * Future.sequence over three timeouts (max 2 s) must complete — and hence
     * count the latch down — well within the 5 s wait.
     */
    @Test
    public void testSequence() throws Exception {
        final CountDownLatch latch = new CountDownLatch(1);
        Future<Void> fu1 = Future.timeout(null, 1L, TimeUnit.SECONDS, sched);
        Future<Void> fu2 = Future.timeout(null, 2L, TimeUnit.SECONDS, sched);
        Future<Void> fu3 = Future.timeout(null, 500L, TimeUnit.MILLISECONDS, sched);
        Future.sequence(newArrayList(fu1, fu2, fu3), sched).onComplete(listTry -> latch.countDown(), sched);
        latch.await(5L, TimeUnit.SECONDS);
        Assert.assertEquals(0, latch.getCount());
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.synapse.core.axis2;
import org.apache.axiom.om.OMElement;
import org.apache.axiom.soap.SOAPBody;
import org.apache.axiom.soap.SOAPEnvelope;
import org.apache.axiom.soap.SOAPHeader;
import org.apache.axiom.soap.SOAPHeaderBlock;
import org.apache.axis2.AxisFault;
import org.apache.axis2.Constants;
import org.apache.axis2.addressing.EndpointReference;
import org.apache.axis2.addressing.RelatesTo;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.synapse.ContinuationState;
import org.apache.synapse.FaultHandler;
import org.apache.synapse.Mediator;
import org.apache.synapse.MessageContext;
import org.apache.synapse.SynapseConstants;
import org.apache.synapse.config.Entry;
import org.apache.synapse.config.SynapseConfiguration;
import org.apache.synapse.core.SynapseEnvironment;
import org.apache.synapse.endpoints.Endpoint;
import org.apache.synapse.mediators.base.SequenceMediator;
import org.apache.synapse.mediators.template.InvokeMediator;
import org.apache.synapse.mediators.template.TemplateMediator;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.Stack;
/**
* This is the MessageContext implementation that synapse uses almost all the time because Synapse
* is implemented on top of the Axis2
*/
public class Axis2MessageContext implements MessageContext {
    /**
     * Holds the reference to the Synapse Message Context
     */
    private SynapseConfiguration synCfg = null;
    /**
     * Holds the environment on which synapse operates
     */
    private SynapseEnvironment synEnv = null;
    /**
     * Synapse Message Context properties
     */
    private final Map<String, Object> properties = new HashMap<String, Object>();
    /**
     * Local entries fetched from the configuration or from the registry for the transactional
     * resource access
     */
    private final Map<String, Object> localEntries = new HashMap<String, Object>();
    /**
     * Fault Handler stack which will be popped and called the handleFault in error states
     */
    private final Stack<FaultHandler> faultStack = new Stack<FaultHandler>();
    /**
     * ContinuationState stack which is used to store ContinuationStates of mediation flow
     */
    private final Stack<ContinuationState> continuationStateStack = new Stack<ContinuationState>();
    /**
     * The Axis2 MessageContext reference
     */
    private org.apache.axis2.context.MessageContext axis2MessageContext = null;
    /**
     * Attribute of the MC specifying whether this is a response or not
     */
    private boolean response = false;
    /**
     * Attribute specifying whether this MC corresponds to fault response or not
     */
    private boolean faultResponse = false;
    /**
     * Attribute of MC stating the tracing state of the message
     */
    private int tracingState = SynapseConstants.TRACING_UNSET;
    /**
     * The service log for this message
     */
    private Log serviceLog = null;
    /**
     * SequenceCallStack is enabled/disabled for this message
     */
    private boolean continuationEnabled = false;
    /**
     * Position of the current mediator in execution in the sequence flow
     */
    private int mediatorPosition = 0;

    public SynapseConfiguration getConfiguration() {
        return synCfg;
    }

    public void setConfiguration(SynapseConfiguration synCfg) {
        this.synCfg = synCfg;
    }

    public SynapseEnvironment getEnvironment() {
        return synEnv;
    }

    public void setEnvironment(SynapseEnvironment synEnv) {
        this.synEnv = synEnv;
    }

    public Map<String, Object> getContextEntries() {
        return localEntries;
    }

    public void setContextEntries(Map<String, Object> entries) {
        this.localEntries.putAll(entries);
    }

    /**
     * Return the main sequence, using the local-entry cache and falling back to
     * the configuration on first access.
     */
    public Mediator getMainSequence() {
        Object o = localEntries.get(SynapseConstants.MAIN_SEQUENCE_KEY);
        if (o != null && o instanceof Mediator) {
            return (Mediator) o;
        } else {
            Mediator main = getConfiguration().getMainSequence();
            localEntries.put(SynapseConstants.MAIN_SEQUENCE_KEY, main);
            return main;
        }
    }

    /**
     * Return the fault sequence, using the local-entry cache and falling back to
     * the configuration on first access.
     */
    public Mediator getFaultSequence() {
        Object o = localEntries.get(SynapseConstants.FAULT_SEQUENCE_KEY);
        if (o != null && o instanceof Mediator) {
            return (Mediator) o;
        } else {
            Mediator fault = getConfiguration().getFaultSequence();
            localEntries.put(SynapseConstants.FAULT_SEQUENCE_KEY, fault);
            return fault;
        }
    }

    /**
     * Return the named sequence, lazily initializing a SequenceMediator on first
     * use. The synchronized double-check mirrors getSequenceTemplate/getEndpoint
     * so a sequence is initialized at most once per instance.
     */
    public Mediator getSequence(String key) {
        Object o = localEntries.get(key);
        if (o != null && o instanceof Mediator) {
            return (Mediator) o;
        } else {
            Mediator m = getConfiguration().getSequence(key);
            if (m instanceof SequenceMediator) {
                SequenceMediator seqMediator = (SequenceMediator) m;
                synchronized (m) {
                    if (!seqMediator.isInitialized()) {
                        seqMediator.init(synEnv);
                    }
                }
            }
            localEntries.put(key, m);
            return m;
        }
    }

    public Mediator getDefaultConfiguration(String key){
        Object o = localEntries.get(key);
        if (o != null && o instanceof Mediator) {
            return (Mediator) o;
        } else {
            Mediator m = getConfiguration().getDefaultConfiguration(key);
            localEntries.put(key, m);
            return m;
        }
    }

    public OMElement getFormat(String key) {
        Object o = localEntries.get(key);
        if (o != null && o instanceof OMElement) {
            return (OMElement) o;
        } else {
            OMElement result = getConfiguration().getFormat(key);
            localEntries.put(key, result);
            return result;
        }
    }

    /**
     * Return the named sequence template, lazily initializing a TemplateMediator
     * on first use (same double-check pattern as getSequence).
     */
    public Mediator getSequenceTemplate(String key) {
        Object o = localEntries.get(key);
        if (o != null && o instanceof Mediator) {
            return (Mediator) o;
        } else {
            Mediator m = getConfiguration().getSequenceTemplate(key);
            if (m instanceof TemplateMediator) {
                TemplateMediator templateMediator = (TemplateMediator) m;
                synchronized (m) {
                    if (!templateMediator.isInitialized()) {
                        templateMediator.init(synEnv);
                    }
                }
            }
            localEntries.put(key, m);
            return m;
        }
    }

    /**
     * Return the named endpoint, lazily initializing it on first use. May return
     * null when the configuration has no such endpoint.
     */
    public Endpoint getEndpoint(String key) {
        Object o = localEntries.get(key);
        if (o != null && o instanceof Endpoint) {
            return (Endpoint) o;
        } else {
            Endpoint e = getConfiguration().getEndpoint(key);
            if (e != null) {
                if (!e.isInitialized()) {
                    synchronized (e) {
                        if (!e.isInitialized()) {
                            e.init(synEnv);
                        }
                    }
                }
                localEntries.put(key, e);
            }
            return e;
        }
    }

    /**
     * Return the value of the named entry: locally cached Entry first, then the
     * configuration; on a miss the entry definition is looked up before retrying.
     */
    public Object getEntry(String key) {
        Object o = localEntries.get(key);
        if (o != null && o instanceof Entry) {
            return ((Entry) o).getValue();
        } else {
            Object e = getConfiguration().getEntry(key);
            if (e != null) {
                localEntries.put(key, e);
                return e;
            } else {
                getConfiguration().getEntryDefinition(key);
                return getConfiguration().getEntry(key);
            }
        }
    }

    /**
     * Get a read-only view of all the properties currently set on this
     * message context
     *
     * @return an unmodifiable map of message context properties
     */
    public Map<String, Object> getProperties() {
        return Collections.unmodifiableMap(properties);
    }

    public Object getProperty(String key) {
        return properties.get(key);
    }

    /**
     * Set a property; null values are silently ignored. Setting the RESPONSE
     * property additionally marks the Axis2 operation context so the server
     * does not commit the response by default.
     */
    public void setProperty(String key, Object value) {
        if (value == null) {
            return;
        }
        properties.put(key, value);
        // do not commit response by default in the server process
        if (SynapseConstants.RESPONSE.equals(key) &&
                getAxis2MessageContext().getOperationContext() != null) {
            getAxis2MessageContext().getOperationContext().setProperty(
                    org.apache.axis2.Constants.RESPONSE_WRITTEN, "SKIP");
        }
    }

    public Set getPropertyKeySet() {
        return properties.keySet();
    }

    /**
     * Constructor for the Axis2MessageContext inside Synapse
     *
     * @param axisMsgCtx MessageContext representing the relevant Axis MC
     * @param synCfg SynapseConfiguration describing Synapse
     * @param synEnv SynapseEnvironment describing the environment of Synapse
     */
    public Axis2MessageContext(org.apache.axis2.context.MessageContext axisMsgCtx,
                               SynapseConfiguration synCfg, SynapseEnvironment synEnv) {
        setAxis2MessageContext(axisMsgCtx);
        this.synCfg = synCfg;
        this.synEnv = synEnv;
        if (synEnv != null && synEnv.isContinuationEnabled()) {
            continuationEnabled = true;
        }
    }

    // ---- Plain delegates to the underlying Axis2 MessageContext ----

    public EndpointReference getFaultTo() {
        return axis2MessageContext.getFaultTo();
    }

    public void setFaultTo(EndpointReference reference) {
        axis2MessageContext.setFaultTo(reference);
    }

    public EndpointReference getFrom() {
        return axis2MessageContext.getFrom();
    }

    public void setFrom(EndpointReference reference) {
        axis2MessageContext.setFrom(reference);
    }

    public SOAPEnvelope getEnvelope() {
        return axis2MessageContext.getEnvelope();
    }

    public void setEnvelope(SOAPEnvelope envelope) throws AxisFault {
        axis2MessageContext.setEnvelope(envelope);
    }

    public String getMessageID() {
        return axis2MessageContext.getMessageID();
    }

    public void setMessageID(String string) {
        axis2MessageContext.setMessageID(string);
    }

    public RelatesTo getRelatesTo() {
        return axis2MessageContext.getRelatesTo();
    }

    public void setRelatesTo(RelatesTo[] reference) {
        axis2MessageContext.setRelationships(reference);
    }

    public EndpointReference getReplyTo() {
        return axis2MessageContext.getReplyTo();
    }

    public void setReplyTo(EndpointReference reference) {
        axis2MessageContext.setReplyTo(reference);
    }

    public EndpointReference getTo() {
        return axis2MessageContext.getTo();
    }

    public void setTo(EndpointReference reference) {
        axis2MessageContext.setTo(reference);
    }

    public void setWSAAction(String actionURI) {
        axis2MessageContext.setWSAAction(actionURI);
    }

    public String getWSAAction() {
        return axis2MessageContext.getWSAAction();
    }

    public void setWSAMessageID(String messageID) {
        axis2MessageContext.setWSAMessageId(messageID);
    }

    public String getWSAMessageID() {
        return axis2MessageContext.getMessageID();
    }

    public String getSoapAction() {
        return axis2MessageContext.getSoapAction();
    }

    public void setSoapAction(String string) {
        axis2MessageContext.setSoapAction(string);
    }

    public boolean isDoingMTOM() {
        return axis2MessageContext.isDoingMTOM();
    }

    public boolean isDoingSWA() {
        return axis2MessageContext.isDoingSwA();
    }

    public void setDoingMTOM(boolean b) {
        axis2MessageContext.setDoingMTOM(b);
    }

    public void setDoingSWA(boolean b) {
        axis2MessageContext.setDoingSwA(b);
    }

    // POX is represented as REST on the Axis2 level
    public boolean isDoingPOX() {
        return axis2MessageContext.isDoingREST();
    }

    public void setDoingPOX(boolean b) {
        axis2MessageContext.setDoingREST(b);
    }

    /**
     * True only when the message is a REST message whose HTTP_METHOD property
     * equals GET.
     */
    public boolean isDoingGET() {
        return Constants.Configuration.HTTP_METHOD_GET.equals(
                axis2MessageContext.getProperty(Constants.Configuration.HTTP_METHOD))
                && axis2MessageContext.isDoingREST();
    }

    public void setDoingGET(boolean b) {
        if (b) {
            axis2MessageContext.setDoingREST(b);
            axis2MessageContext.setProperty(Constants.Configuration.HTTP_METHOD,
                    Constants.Configuration.HTTP_METHOD_GET);
        } else {
            axis2MessageContext.removeProperty(Constants.Configuration.HTTP_METHOD);
        }
    }

    public boolean isSOAP11() {
        return axis2MessageContext.isSOAP11();
    }

    public void setResponse(boolean b) {
        response = b;
        axis2MessageContext.setProperty(SynapseConstants.ISRESPONSE_PROPERTY, b);
    }

    /**
     * True when either the RESPONSE property is the string "true" (any case)
     * or the internal response flag is set.
     */
    public boolean isResponse() {
        Object o = properties.get(SynapseConstants.RESPONSE);
        // Parentheses make the intended precedence explicit:
        // (property says "true") OR (internal flag)
        return (o != null && o instanceof String &&
                ((String) o).equalsIgnoreCase("true")) || response;
    }

    public void setFaultResponse(boolean b) {
        this.faultResponse = b;
    }

    public boolean isFaultResponse() {
        return this.faultResponse;
    }

    public int getTracingState() {
        return tracingState;
    }

    public void setTracingState(int tracingState) {
        this.tracingState = tracingState;
    }

    public Stack<FaultHandler> getFaultStack() {
        return this.faultStack;
    }

    public void pushFaultHandler(FaultHandler fault) {
        this.faultStack.push(fault);
    }

    public void pushContinuationState(ContinuationState continuationState) {
        this.continuationStateStack.push(continuationState);
    }

    public Stack<ContinuationState> getContinuationStateStack() {
        return this.continuationStateStack;
    }

    /**
     * Return the service level Log for this message context or null
     *
     * @return the service level Log for the message
     */
    public Log getServiceLog() {
        if (serviceLog != null) {
            return serviceLog;
        } else {
            String serviceName = (String) getProperty(SynapseConstants.PROXY_SERVICE);
            if (serviceName != null && synCfg.getProxyService(serviceName) != null) {
                serviceLog = LogFactory.getLog(
                        SynapseConstants.SERVICE_LOGGER_PREFIX + serviceName);
                return serviceLog;
            } else {
                // No proxy service — use the generic service logger
                // (prefix with the trailing separator stripped).
                serviceLog = LogFactory.getLog(
                        SynapseConstants.SERVICE_LOGGER_PREFIX.substring(0,
                                SynapseConstants.SERVICE_LOGGER_PREFIX.length() - 1));
                return serviceLog;
            }
        }
    }

    /**
     * Set the service log
     *
     * @param serviceLog log to be used on a per-service basis
     */
    public void setServiceLog(Log serviceLog) {
        this.serviceLog = serviceLog;
    }

    public org.apache.axis2.context.MessageContext getAxis2MessageContext() {
        return axis2MessageContext;
    }

    /**
     * Attach the underlying Axis2 message context and pick up its
     * ISRESPONSE_PROPERTY into the local response flag when present.
     */
    public void setAxis2MessageContext(org.apache.axis2.context.MessageContext axisMsgCtx) {
        this.axis2MessageContext = axisMsgCtx;
        Boolean resp = (Boolean) axisMsgCtx.getProperty(SynapseConstants.ISRESPONSE_PROPERTY);
        if (resp != null) {
            response = resp;
        }
    }

    public void setPaused(boolean value) {
        axis2MessageContext.setPaused(value);
    }

    public boolean isPaused() {
        return axis2MessageContext.isPaused();
    }

    public boolean isServerSide() {
        return axis2MessageContext.isServerSide();
    }

    public void setServerSide(boolean value) {
        axis2MessageContext.setServerSide(value);
    }

    /**
     * Render addressing headers, SOAP headers and the SOAP body for logging.
     * Uses a method-local StringBuilder (no synchronization needed) instead of
     * the legacy StringBuffer.
     * NOTE(review): assumes getEnvelope() is non-null at this point — confirm
     * this holds on every code path that logs the context.
     */
    public String toString() {
        StringBuilder sb = new StringBuilder();
        String separator = "\n";
        if (getTo() != null) {
            sb.append("To : ").append(getTo().getAddress());
        } else {
            sb.append("To : ");
        }
        if (getFrom() != null) {
            sb.append(separator).append("From : ").append(getFrom().getAddress());
        }
        if (getWSAAction() != null) {
            sb.append(separator).append("WSAction : ").append(getWSAAction());
        }
        if (getSoapAction() != null) {
            sb.append(separator).append("SOAPAction : ").append(getSoapAction());
        }
        if (getReplyTo() != null) {
            sb.append(separator).append("ReplyTo : ").append(getReplyTo().getAddress());
        }
        if (getMessageID() != null) {
            sb.append(separator).append("MessageID : ").append(getMessageID());
        }
        SOAPHeader soapHeader = getEnvelope().getHeader();
        if (soapHeader != null) {
            sb.append(separator).append("Headers : ");
            for (Iterator iter = soapHeader.examineAllHeaderBlocks(); iter.hasNext();) {
                Object o = iter.next();
                if (o instanceof SOAPHeaderBlock) {
                    SOAPHeaderBlock headerBlock = (SOAPHeaderBlock) o;
                    sb.append(separator).append("\t").append(
                            headerBlock.getLocalName()).append(" : ").append(headerBlock.getText());
                } else if (o instanceof OMElement) {
                    OMElement headerElem = (OMElement) o;
                    sb.append(separator).append("\t").append(
                            headerElem.getLocalName()).append(" : ").append(headerElem.getText());
                }
            }
        }
        SOAPBody soapBody = getEnvelope().getBody();
        if (soapBody != null) {
            sb.append(separator).append("Body : ").append(soapBody.toString());
        }
        return sb.toString();
    }

    public boolean isContinuationEnabled() {
        return continuationEnabled;
    }

    public void setContinuationEnabled(boolean continuationEnabled) {
        this.continuationEnabled = continuationEnabled;
    }

    public void setMediatorPosition(int mediatorPosition) {
        this.mediatorPosition = mediatorPosition;
    }

    public int getMediatorPosition() {
        return mediatorPosition;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.cluster.plugin.nacos;
import com.alibaba.nacos.api.naming.NamingService;
import java.util.Collections;
import java.util.List;
import org.apache.skywalking.apm.util.StringUtil;
import org.apache.skywalking.oap.server.core.cluster.ClusterNodesQuery;
import org.apache.skywalking.oap.server.core.cluster.ClusterRegister;
import org.apache.skywalking.oap.server.core.cluster.RemoteInstance;
import org.apache.skywalking.oap.server.core.remote.client.Address;
import org.apache.skywalking.oap.server.library.module.ModuleManager;
import org.apache.skywalking.oap.server.library.module.ModuleProvider;
import org.apache.skywalking.oap.server.library.module.ModuleStartException;
import org.apache.skywalking.oap.server.telemetry.TelemetryModule;
import org.apache.skywalking.oap.server.telemetry.api.MetricsCreator;
import org.apache.skywalking.oap.server.telemetry.none.MetricsCreatorNoop;
import org.apache.skywalking.oap.server.telemetry.none.NoneTelemetryProvider;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.powermock.core.classloader.annotations.PowerMockIgnore;
import org.powermock.modules.junit4.PowerMockRunner;
import org.powermock.reflect.Whitebox;
import org.testcontainers.containers.GenericContainer;
import org.testcontainers.containers.wait.strategy.Wait;
import org.testcontainers.utility.DockerImageName;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
/**
 * Functional integration test of the Nacos cluster-module provider: registers OAP
 * instances against a real standalone Nacos server (run via Testcontainers) and
 * verifies that cluster-node queries report the expected members and self flags.
 */
@RunWith(PowerMockRunner.class)
@PowerMockIgnore({"javax.security.*", "javax.net.ssl.*", "javax.management.*"})
public class ITClusterModuleNacosProviderFunctionalTest {

    private String nacosAddress;
    private final String username = "nacos";
    private final String password = "nacos";

    /** Standalone Nacos 1.4.2 server; considered ready once its startup log line appears. */
    @Rule
    public final GenericContainer<?> container =
        new GenericContainer<>(DockerImageName.parse("nacos/nacos-server:1.4.2"))
            .waitingFor(Wait.forLogMessage(".*Nacos started successfully.*", 1))
            .withEnv(Collections.singletonMap("MODE", "standalone"));

    @Mock
    private ModuleManager moduleManager;
    @Mock
    private NoneTelemetryProvider telemetryProvider;

    /** Wires a no-op telemetry module into the mocked module manager and records the server address. */
    @Before
    public void before() {
        Mockito.when(telemetryProvider.getService(MetricsCreator.class))
            .thenReturn(new MetricsCreatorNoop());
        TelemetryModule telemetryModule = Mockito.spy(TelemetryModule.class);
        Whitebox.setInternalState(telemetryModule, "loadedProvider", telemetryProvider);
        Mockito.when(moduleManager.find(TelemetryModule.NAME)).thenReturn(telemetryModule);
        nacosAddress = container.getHost() + ":" + container.getMappedPort(8848);
    }

    /** A self-registered instance must be visible to its own cluster query and flagged as self. */
    @Test
    public void registerRemote() throws Exception {
        final String serviceName = "register_remote";
        ModuleProvider provider = createProvider(serviceName);

        Address selfAddress = new Address("127.0.0.1", 1000, true);
        RemoteInstance instance = new RemoteInstance(selfAddress);
        getClusterRegister(provider).registerRemote(instance);

        List<RemoteInstance> remoteInstances = queryRemoteNodes(provider, 1);
        assertEquals(1, remoteInstances.size());
        Address queryAddress = remoteInstances.get(0).getAddress();
        assertEquals(selfAddress, queryAddress);
        assertTrue(queryAddress.isSelf());
    }

    /** When an internal communication host/port is configured, queries report that address instead. */
    @Test
    public void registerRemoteOfInternal() throws Exception {
        final String serviceName = "register_remote_internal";
        ModuleProvider provider = createProvider(serviceName, "127.0.1.2", 1000);

        Address selfAddress = new Address("127.0.0.2", 1000, true);
        RemoteInstance instance = new RemoteInstance(selfAddress);
        getClusterRegister(provider).registerRemote(instance);

        List<RemoteInstance> remoteInstances = queryRemoteNodes(provider, 1);
        ClusterModuleNacosConfig config =
            (ClusterModuleNacosConfig) provider.createConfigBeanIfAbsent();
        assertEquals(1, remoteInstances.size());
        Address queryAddress = remoteInstances.get(0).getAddress();
        assertEquals(config.getInternalComHost(), queryAddress.getHost());
        assertEquals(config.getInternalComPort(), queryAddress.getPort());
        assertTrue(queryAddress.isSelf());
    }

    /** A provider that registered nothing itself sees the other member, not flagged as self. */
    @Test
    public void registerRemoteOfReceiver() throws Exception {
        final String serviceName = "register_remote_receiver";
        ModuleProvider providerA = createProvider(serviceName);
        ModuleProvider providerB = createProvider(serviceName);

        // Mixed or Aggregator
        Address selfAddress = new Address("127.0.0.3", 1000, true);
        RemoteInstance instance = new RemoteInstance(selfAddress);
        getClusterRegister(providerA).registerRemote(instance);

        // Receiver
        List<RemoteInstance> remoteInstances = queryRemoteNodes(providerB, 1);
        assertEquals(1, remoteInstances.size());
        Address queryAddress = remoteInstances.get(0).getAddress();
        assertEquals(selfAddress, queryAddress);
        assertFalse(queryAddress.isSelf());
    }

    /** Two registered providers must each see both members, with only their own address marked self. */
    @Test
    public void registerRemoteOfCluster() throws Exception {
        final String serviceName = "register_remote_cluster";
        ModuleProvider providerA = createProvider(serviceName);
        ModuleProvider providerB = createProvider(serviceName);

        Address addressA = new Address("127.0.0.4", 1000, true);
        Address addressB = new Address("127.0.0.5", 1000, true);
        RemoteInstance instanceA = new RemoteInstance(addressA);
        RemoteInstance instanceB = new RemoteInstance(addressB);
        getClusterRegister(providerA).registerRemote(instanceA);
        getClusterRegister(providerB).registerRemote(instanceB);

        List<RemoteInstance> remoteInstancesOfA = queryRemoteNodes(providerA, 2);
        validateServiceInstance(addressA, addressB, remoteInstancesOfA);
        List<RemoteInstance> remoteInstancesOfB = queryRemoteNodes(providerB, 2);
        validateServiceInstance(addressB, addressA, remoteInstancesOfB);
    }

    /** After one member deregisters from Nacos, only the remaining member is reported. */
    @Test
    public void deregisterRemoteOfCluster() throws Exception {
        final String serviceName = "deregister_remote_cluster";
        ModuleProvider providerA = createProvider(serviceName);
        ModuleProvider providerB = createProvider(serviceName);

        Address addressA = new Address("127.0.0.6", 1000, true);
        Address addressB = new Address("127.0.0.7", 1000, true);
        RemoteInstance instanceA = new RemoteInstance(addressA);
        RemoteInstance instanceB = new RemoteInstance(addressB);
        getClusterRegister(providerA).registerRemote(instanceA);
        getClusterRegister(providerB).registerRemote(instanceB);

        List<RemoteInstance> remoteInstancesOfA = queryRemoteNodes(providerA, 2);
        validateServiceInstance(addressA, addressB, remoteInstancesOfA);
        List<RemoteInstance> remoteInstancesOfB = queryRemoteNodes(providerB, 2);
        validateServiceInstance(addressB, addressA, remoteInstancesOfB);

        // deregister A
        ClusterRegister register = getClusterRegister(providerA);
        NamingService namingServiceA = Whitebox.getInternalState(register, "namingService");
        namingServiceA.deregisterInstance(serviceName, addressA.getHost(), addressA.getPort());

        // only B
        remoteInstancesOfB = queryRemoteNodes(providerB, 1);
        assertEquals(1, remoteInstancesOfB.size());
        Address address = remoteInstancesOfB.get(0).getAddress();
        assertEquals(addressB, address);
        assertTrue(address.isSelf());
    }

    /**
     * Creates and starts a provider registered under {@code serviceName} without an
     * internal-communication override.
     *
     * <p>FIX: was a near-duplicate of the overload below (with a misspelled parameter
     * name {@code servicName}); now delegates so the provider setup lives in one place.
     */
    private ClusterModuleNacosProvider createProvider(String serviceName) throws Exception {
        return createProvider(serviceName, null, 0);
    }

    /**
     * Creates, configures and starts a provider. When {@code internalComHost} is non-empty
     * and {@code internalComPort} is positive they are applied to the config before startup.
     */
    private ClusterModuleNacosProvider createProvider(String serviceName, String internalComHost,
                                                      int internalComPort) throws Exception {
        ClusterModuleNacosProvider provider = new ClusterModuleNacosProvider();
        ClusterModuleNacosConfig config =
            (ClusterModuleNacosConfig) provider.createConfigBeanIfAbsent();
        config.setHostPort(nacosAddress);
        config.setServiceName(serviceName);
        config.setUsername(username);
        config.setPassword(password);
        if (!StringUtil.isEmpty(internalComHost)) {
            config.setInternalComHost(internalComHost);
        }
        if (internalComPort > 0) {
            config.setInternalComPort(internalComPort);
        }
        provider.setManager(moduleManager);
        provider.prepare();
        provider.start();
        provider.notifyAfterCompleted();
        return provider;
    }

    private ClusterRegister getClusterRegister(ModuleProvider provider) {
        return provider.getService(ClusterRegister.class);
    }

    private ClusterNodesQuery getClusterNodesQuery(ModuleProvider provider) {
        return provider.getService(ClusterNodesQuery.class);
    }

    /**
     * Polls the cluster query until exactly {@code goals} instances are visible, retrying
     * once per second for up to 20 attempts; returns an empty list on timeout.
     */
    private List<RemoteInstance> queryRemoteNodes(ModuleProvider provider, int goals)
        throws InterruptedException {
        int i = 20;
        do {
            List<RemoteInstance> instances = getClusterNodesQuery(provider).queryRemoteNodes();
            if (instances.size() == goals) {
                return instances;
            } else {
                Thread.sleep(1000);
            }
        }
        while (--i > 0);
        return Collections.emptyList();
    }

    /** Asserts the result holds exactly the self and other addresses with the correct self flags. */
    private void validateServiceInstance(Address selfAddress, Address otherAddress,
                                         List<RemoteInstance> queryResult) {
        assertEquals(2, queryResult.size());
        boolean selfExist = false, otherExist = false;
        for (RemoteInstance instance : queryResult) {
            Address queryAddress = instance.getAddress();
            if (queryAddress.equals(selfAddress) && queryAddress.isSelf()) {
                selfExist = true;
            } else if (queryAddress.equals(otherAddress) && !queryAddress.isSelf()) {
                otherExist = true;
            }
        }
        assertTrue(selfExist);
        assertTrue(otherExist);
    }
}
| |
/**
* (c) Copyright 2014 WibiData, Inc.
*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kiji.schema.impl;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import javax.annotation.concurrent.NotThreadSafe;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import org.kiji.annotations.ApiAudience;
import org.kiji.schema.EntityId;
import org.kiji.schema.KijiCell;
import org.kiji.schema.KijiColumnName;
import org.kiji.schema.KijiDataRequest;
import org.kiji.schema.KijiDataRequest.Column;
import org.kiji.schema.KijiDataRequestBuilder;
import org.kiji.schema.KijiDataRequestBuilder.ColumnsDef;
import org.kiji.schema.KijiResult;
/**
* A {@link KijiResult} which proxies all calls to a pair of paged and materialized Kiji results.
*
* @param <T> The type of {@code KijiCell} values in the view.
*/
@NotThreadSafe
@ApiAudience.Private
public final class DefaultKijiResult<T> implements KijiResult<T> {
  private final KijiDataRequest mDataRequest;
  // Backs the non-paged columns of the request; its cells are already in memory.
  private final KijiResult<T> mMaterializedResult;
  // Backs the paged columns of the request; cells may be fetched lazily.
  private final KijiResult<T> mPagedResult;

  /**
   * Construct a new {@code DefaultKijiResult} with the provided data request, paged kiji result,
   * and materialized kiji result.
   *
   * @param dataRequest The data request for the result.
   * @param materializedResult The materialized result.
   * @param pagedResult The paged result.
   */
  private DefaultKijiResult(
      final KijiDataRequest dataRequest,
      final KijiResult<T> materializedResult,
      final KijiResult<T> pagedResult
  ) {
    mDataRequest = dataRequest;
    mMaterializedResult = materializedResult;
    mPagedResult = pagedResult;
  }

  /**
   * Create a new {@code DefaultKijiResult} with the provided data request, paged result,
   * and materialized result.
   *
   * @param dataRequest The data request for the result.
   * @param materializedResult The materialized result.
   * @param pagedResult The paged result.
   * @param <T> The type of {@code KijiCell} values in the result.
   * @return A {@code KijiResult} wrapping the provided materialized and paged results.
   */
  public static <T> KijiResult<T> create(
      final KijiDataRequest dataRequest,
      final KijiResult<T> materializedResult,
      final KijiResult<T> pagedResult
  ) {
    return new DefaultKijiResult<T>(dataRequest, materializedResult, pagedResult);
  }

  /** {@inheritDoc} */
  @Override
  public EntityId getEntityId() {
    // Both delegates share the same entity; the paged one is used arbitrarily.
    return mPagedResult.getEntityId();
  }

  /** {@inheritDoc} */
  @Override
  public KijiDataRequest getDataRequest() {
    return mDataRequest;
  }

  /** {@inheritDoc} */
  @Override
  public Iterator<KijiCell<T>> iterator() {
    // Materialized cells are yielded before paged cells.
    return Iterables.concat(mMaterializedResult, mPagedResult).iterator();
  }

  /** {@inheritDoc} */
  @Override
  @SuppressWarnings("unchecked")
  public <U extends T> KijiResult<U> narrowView(final KijiColumnName column) {
    final KijiDataRequest narrowRequest = narrowRequest(column, mDataRequest);
    // Nothing in the request matches the column: return an empty view.
    if (narrowRequest.isEmpty()) {
      return new EmptyKijiResult<U>(mMaterializedResult.getEntityId(), narrowRequest);
    }

    // Scan the narrowed request; as soon as both paged and unpaged columns are
    // seen, the view must keep the materialized/paged split, so return early.
    boolean containsPagedColumns = false;
    boolean containsUnpagedColumns = false;
    for (Column columnRequest : narrowRequest.getColumns()) {
      if (columnRequest.isPagingEnabled()) {
        containsPagedColumns = true;
      } else {
        containsUnpagedColumns = true;
      }
      if (containsPagedColumns && containsUnpagedColumns) {
        return DefaultKijiResult.create(
            narrowRequest,
            mMaterializedResult.<U>narrowView(column),
            mPagedResult.<U>narrowView(column));
      }
    }

    // The narrowed request is homogeneous: delegate to the matching side only.
    if (containsPagedColumns) {
      return mPagedResult.narrowView(column);
    } else {
      return mMaterializedResult.narrowView(column);
    }
  }

  /**
   * {@inheritDoc}
   */
  @Override
  public void close() throws IOException {
    // NOTE(review): only the paged result is closed — presumably the materialized
    // result holds no closeable resources; confirm against its implementation.
    mPagedResult.close();
  }

  // -----------------------------------------------------------------------------------------------
  // Helper methods
  // -----------------------------------------------------------------------------------------------

  /**
   * Narrow a {@link KijiDataRequest} to a column. Will return a new data request. The column may
   * be fully qualified or a family.
   *
   * @param column to narrow data request.
   * @param dataRequest to narrow.
   * @return a data request narrowed to the specified column.
   */
  public static KijiDataRequest narrowRequest(
      final KijiColumnName column,
      final KijiDataRequest dataRequest
  ) {
    final List<Column> columnRequests = getColumnRequests(column, dataRequest);
    final KijiDataRequestBuilder builder = KijiDataRequest.builder();
    // Preserve the original time range; only the column set is narrowed.
    builder.withTimeRange(dataRequest.getMinTimestamp(), dataRequest.getMaxTimestamp());
    for (Column columnRequest : columnRequests) {
      builder.newColumnsDef(columnRequest);
    }
    return builder.build();
  }

  /**
   * Retrieve the column requests corresponding to a Kiji column in a {@code KijiDataRequest}.
   *
   * <p>
   * If the requested column is fully qualified, and the request contains a family request
   * containing the column, a new {@code Column} request will be created which corresponds to
   * the requested family narrowed to the qualifier.
   * </p>
   *
   * @param column a fully qualified {@link KijiColumnName}
   * @param dataRequest the data request to get column request from.
   * @return the column request.
   */
  private static List<Column> getColumnRequests(
      final KijiColumnName column,
      final KijiDataRequest dataRequest
  ) {
    // Exact match (same family/qualifier) wins outright.
    final Column exactRequest = dataRequest.getColumn(column);
    if (exactRequest != null) {
      return ImmutableList.of(exactRequest);
    }

    if (column.isFullyQualified()) {
      // The column is fully qualified, but a request doesn't exist for the qualified column.
      // Check if the family is requested, and if so create a new qualified-column request from it.
      final Column familyRequest =
          dataRequest.getRequestForColumn(KijiColumnName.create(column.getFamily(), null));
      if (familyRequest == null) {
        return ImmutableList.of();
      }
      // Clone the family request's options (filter, page size, max versions, reader
      // spec) onto a request for the single qualified column.
      ColumnsDef columnDef = ColumnsDef
          .create()
          .withFilter(familyRequest.getFilter())
          .withPageSize(familyRequest.getPageSize())
          .withMaxVersions(familyRequest.getMaxVersions())
          .add(column.getFamily(), column.getQualifier(), familyRequest.getReaderSpec());

      return ImmutableList.of(
          KijiDataRequest.builder().addColumns(columnDef).build().getColumn(column));
    } else {
      // The column is a family, but a request doesn't exist for the entire family add all requests
      // for individual columns in the family.
      ImmutableList.Builder<Column> columnRequests = ImmutableList.builder();
      for (Column columnRequest : dataRequest.getColumns()) {
        if (columnRequest.getColumnName().getFamily().equals(column.getFamily())) {
          columnRequests.add(columnRequest);
        }
      }
      return columnRequests.build();
    }
  }
}
| |
package com.example.tut1;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Set;
import java.util.UUID;
import android.app.Activity;
import android.bluetooth.BluetoothAdapter;
import android.bluetooth.BluetoothDevice;
import android.bluetooth.BluetoothSocket;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.os.Bundle;
import android.os.Handler;
import android.util.Log;
import android.view.View;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.ArrayAdapter;
import android.widget.ListView;
import android.widget.Toast;
/**
 * Activity that lists paired and newly discovered Bluetooth devices and, when a
 * paired device is tapped, opens an RFCOMM (SPP) connection to it on a background
 * thread. Connection events and incoming bytes are delivered through {@link #mHandler}.
 */
public class Bluetooth extends Activity implements OnItemClickListener {

    /** Closes the active connection, if any. Safe to call when not connected. */
    public static void disconnect() {
        if (connectedThread != null) {
            connectedThread.cancel();
            connectedThread = null;
        }
    }

    /** Installs the handler that receives SUCCESS_CONNECT / MESSAGE_READ messages. */
    public static void gethandler(Handler handler) { // Bluetooth handler
        mHandler = handler;
    }

    static Handler mHandler = new Handler();
    static ConnectedThread connectedThread;
    /** Well-known Serial Port Profile (SPP) UUID for createRfcommSocketToServiceRecord. */
    public static final UUID MY_UUID = UUID.fromString("00001101-0000-1000-8000-00805F9B34FB");
    protected static final int SUCCESS_CONNECT = 0;
    protected static final int MESSAGE_READ = 1;

    ArrayAdapter<String> listAdapter;
    ListView listView;
    static BluetoothAdapter btAdapter;
    Set<BluetoothDevice> devicesArray;
    ArrayList<String> pairedDevices;
    // Discovered devices, in the same order as the rows added to listAdapter.
    ArrayList<BluetoothDevice> devices;
    IntentFilter filter;
    BroadcastReceiver receiver;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_bluetooth);
        init();
        if (btAdapter == null) {
            // FIX: duration was the raw literal 0; use the documented Toast constant.
            Toast.makeText(getApplicationContext(), "No bluetooth detected", Toast.LENGTH_SHORT).show();
            finish();
        } else {
            if (!btAdapter.isEnabled()) {
                turnOnBT();
            }
            getPairedDevices();
            startDiscovery();
        }
    }

    /** Restarts device discovery from a clean state. */
    private void startDiscovery() {
        btAdapter.cancelDiscovery();
        btAdapter.startDiscovery();
    }

    /** Asks the user to enable Bluetooth; the result arrives in onActivityResult. */
    private void turnOnBT() {
        Intent intent = new Intent(BluetoothAdapter.ACTION_REQUEST_ENABLE);
        startActivityForResult(intent, 1);
    }

    /** Caches the names of already-bonded devices, used to label rows "(Paired)". */
    private void getPairedDevices() {
        devicesArray = btAdapter.getBondedDevices();
        if (devicesArray.size() > 0) {
            for (BluetoothDevice device : devicesArray) {
                pairedDevices.add(device.getName());
            }
        }
    }

    /** Wires up the list view, the adapter and the broadcast receiver. */
    private void init() {
        listView = (ListView) findViewById(R.id.listView);
        listView.setOnItemClickListener(this);
        // FIX: use the two-argument constructor. The original passed 0 as the
        // textViewResourceId, relying on ArrayAdapter's internal fallback.
        listAdapter = new ArrayAdapter<String>(this, android.R.layout.simple_list_item_1);
        listView.setAdapter(listAdapter);
        btAdapter = BluetoothAdapter.getDefaultAdapter();
        pairedDevices = new ArrayList<String>();
        devices = new ArrayList<BluetoothDevice>();

        receiver = new BroadcastReceiver() {
            @Override
            public void onReceive(Context context, Intent intent) {
                String action = intent.getAction();
                if (BluetoothDevice.ACTION_FOUND.equals(action)) {
                    BluetoothDevice device = intent.getParcelableExtra(BluetoothDevice.EXTRA_DEVICE);
                    devices.add(device);
                    // FIX: getName() may return null before the remote name is fetched;
                    // guard the comparison instead of risking an NPE.
                    String name = device.getName();
                    String s = "";
                    for (int a = 0; a < pairedDevices.size(); a++) {
                        if (name != null && name.equals(pairedDevices.get(a))) {
                            // append
                            s = "(Paired)";
                            break;
                        }
                    }
                    listAdapter.add(name + " " + s + " " + "\n" + device.getAddress());
                } else if (BluetoothAdapter.ACTION_DISCOVERY_STARTED.equals(action)) {
                } else if (BluetoothAdapter.ACTION_DISCOVERY_FINISHED.equals(action)) {
                } else if (BluetoothAdapter.ACTION_STATE_CHANGED.equals(action)) {
                    // FIX: STATE_OFF is static; access it via the class, not the instance.
                    if (btAdapter.getState() == BluetoothAdapter.STATE_OFF) {
                        turnOnBT();
                    }
                }
            }
        };

        // FIX: the original shadowed the 'filter' field with a local variable,
        // registered the same receiver three separate times, and built the
        // ACTION_STATE_CHANGED filter without ever registering it (so that branch
        // of onReceive never fired). One filter carrying every action, registered
        // once, covers all cases.
        filter = new IntentFilter(BluetoothDevice.ACTION_FOUND);
        filter.addAction(BluetoothAdapter.ACTION_DISCOVERY_STARTED);
        filter.addAction(BluetoothAdapter.ACTION_DISCOVERY_FINISHED);
        filter.addAction(BluetoothAdapter.ACTION_STATE_CHANGED);
        registerReceiver(receiver, filter);
    }

    @Override
    protected void onPause() {
        super.onPause();
        // NOTE(review): the receiver is registered in onCreate() (via init()) but
        // unregistered here, so it is never re-registered after onResume(). Consider
        // moving registration to onResume(); left unchanged to preserve behaviour.
        unregisterReceiver(receiver);
    }

    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        if (resultCode == RESULT_CANCELED) {
            Toast.makeText(getApplicationContext(), "Bluetooth must be enabled to continue", Toast.LENGTH_SHORT).show();
            finish();
        }
    }

    @Override
    public void onItemClick(AdapterView<?> arg0, View arg1, int arg2, long arg3) {
        if (btAdapter.isDiscovering()) {
            btAdapter.cancelDiscovery();
        }
        if (listAdapter.getItem(arg2).contains("(Paired)")) {
            BluetoothDevice selectedDevice = devices.get(arg2);
            ConnectThread connect = new ConnectThread(selectedDevice);
            connect.start();
            finish();
        } else {
            // FIX: duration was the raw literal 0; use the documented Toast constant.
            Toast.makeText(getApplicationContext(), "device is not paired", Toast.LENGTH_SHORT).show();
        }
    }

    /** Opens an RFCOMM socket to the given device off the UI thread. */
    private class ConnectThread extends Thread {
        private final BluetoothSocket mmSocket;
        private final BluetoothDevice mmDevice;

        public ConnectThread(BluetoothDevice device) {
            // Use a temporary object that is later assigned to mmSocket,
            // because mmSocket is final
            BluetoothSocket tmp = null;
            mmDevice = device;

            // Get a BluetoothSocket to connect with the given BluetoothDevice
            try {
                // MY_UUID is the app's UUID string, also used by the server code
                tmp = device.createRfcommSocketToServiceRecord(MY_UUID);
            } catch (IOException e) {
                // FIX: was silently swallowed; at least record the failure.
                Log.e("Bluetooth", "RFCOMM socket creation failed", e);
            }
            mmSocket = tmp;
        }

        public void run() {
            // FIX: if socket creation failed in the constructor, mmSocket is null and
            // the original code crashed with an NPE here.
            if (mmSocket == null) {
                return;
            }
            // Cancel discovery because it will slow down the connection
            btAdapter.cancelDiscovery();
            try {
                // Connect the device through the socket. This will block
                // until it succeeds or throws an exception
                mmSocket.connect();
            } catch (IOException connectException) {
                // Unable to connect; close the socket and get out
                try {
                    mmSocket.close();
                } catch (IOException closeException) {
                    Log.e("Bluetooth", "Could not close socket after failed connect", closeException);
                }
                return;
            }
            // Do work to manage the connection (in a separate thread)
            mHandler.obtainMessage(SUCCESS_CONNECT, mmSocket).sendToTarget();
        }

        /** Will cancel an in-progress connection, and close the socket */
        public void cancel() {
            try {
                mmSocket.close();
            } catch (IOException e) {
                Log.e("Bluetooth", "Socket close failed", e);
            }
        }
    }

    /** Pumps bytes between the connected socket and the UI handler. */
    static class ConnectedThread extends Thread {
        private final BluetoothSocket mmSocket;
        private final InputStream mmInStream;
        private final OutputStream mmOutStream;

        public ConnectedThread(BluetoothSocket socket) {
            mmSocket = socket;
            InputStream tmpIn = null;
            OutputStream tmpOut = null;

            // Get the input and output streams, using temp objects because
            // member streams are final
            try {
                tmpIn = socket.getInputStream();
                tmpOut = socket.getOutputStream();
            } catch (IOException e) {
                // FIX: was silently swallowed; at least record the failure.
                Log.e("Bluetooth", "Failed to obtain socket streams", e);
            }
            mmInStream = tmpIn;
            mmOutStream = tmpOut;
        }

        StringBuffer sbb = new StringBuffer();

        public void run() {
            byte[] buffer; // buffer store for the stream
            int bytes; // bytes returned from read()

            // Keep listening to the InputStream until an exception occurs
            while (true) {
                try {
                    try {
                        sleep(30);
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                    }
                    buffer = new byte[1024];
                    // Read from the InputStream
                    bytes = mmInStream.read(buffer);
                    // FIX: read() returns -1 at end-of-stream without throwing; the
                    // original looped forever sending -1 counts to the handler.
                    if (bytes == -1) {
                        break;
                    }
                    // Send the obtained bytes to the UI activity
                    mHandler.obtainMessage(MESSAGE_READ, bytes, -1, buffer).sendToTarget();
                } catch (IOException e) {
                    break;
                }
            }
        }

        /* Call this from the main activity to send data to the remote device */
        public void write(String income) {
            try {
                mmOutStream.write(income.getBytes());
                // Log each outgoing byte as its character representation (debug aid).
                for (int i = 0; i < income.getBytes().length; i++)
                    Log.v("outStream" + Integer.toString(i), Character.toString((char) (Integer.parseInt(Byte.toString(income.getBytes()[i])))));
                try {
                    Thread.sleep(20);
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
            } catch (IOException e) {
                Log.e("Bluetooth", "Write failed", e);
            }
        }

        /* Call this from the main activity to shutdown the connection */
        public void cancel() {
            try {
                mmSocket.close();
            } catch (IOException e) {
                Log.e("Bluetooth", "Socket close failed", e);
            }
        }
    }
}
| |
/*
* Copyright (c) 2011-2013, Peter Abeles. All Rights Reserved.
*
* This file is part of BoofCV (http://boofcv.org).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package boofcv.struct.image;
import org.junit.Test;
import java.lang.reflect.Array;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Random;
import static org.junit.Assert.*;
/**
 * Standard tests for children of {@link ImageInterleaved}. Ensures that they contain
 * all the expected functions and that they have the expected behavior. This is done
 * through extensive use of reflections.
 *
 * @author Peter Abeles
 */
public abstract class StandardImageInterleavedTests {

	public Random rand = new Random(234);

	/** Creates an image of the concrete type under test with the given shape and band count. */
	public abstract ImageInterleaved createImage(int width, int height, int numBands);

	/** Returns a random value compatible with the image's primitive data type. */
	public abstract Number randomNumber();

	/**
	 * Sets each element in the image to a random value.
	 */
	public void setRandom(ImageInterleaved img) {
		Object data = img._getData();

		int N = Array.getLength(data);
		for (int i = 0; i < N; i++) {
			Array.set(data, i, randomNumber());
		}
	}

	/**
	 * Checks to see if the implementation specific to ImageInterleavedTests
	 * works
	 */
	@Test
	public void isSubimage() {
		ImageInterleaved a = createImage(10, 20, 3);

		assertFalse(a.isSubimage());

		assertTrue(a.subimage(0, 5, 0, 5).isSubimage());
		assertTrue(a.subimage(2, 5, 2, 5).isSubimage());
	}

	/**
	 * Check for a positive case of get() and set()
	 */
	@Test
	public void get_set() {
		ImageInterleaved img = createImage(10, 20, 3);
		setRandom(img);

		// A whole-pixel array (one value per band) is read and written at once.
		Object expected = createPixelArray(img);
		Object orig = call(img, "get", 2, null, 1, 1);

		// make sure the two are not equal
		assertFalse(compareArrays(expected, orig, img.getNumBands()));

		// set the expected to the point in the image
		call(img, "set", 2, expected, 1, 1);
		Object found = call(img, "get", 2, null, 1, 1);
		assertTrue(compareArrays(expected, found, img.getNumBands()));
	}

	/**
	 * Check for a positive case of get() and set()
	 */
	@Test
	public void getBand_setBand() {
		ImageInterleaved img = createImage(10, 20, 2);
		setRandom(img);

		// A single band value at (x, y, band) is read and written.
		Number expected = randomNumber();
		Number orig = (Number) call(img, "getBand", 0, null, 1, 1, 0);

		// make sure the two are not equal
		assertFalse(expected.equals(orig));

		// set the expected to the point in the image
		call(img, "setBand", 1, expected, 1, 1, 0);
		Number found = (Number) call(img, "getBand", 0, null, 1, 1, 0);
		assertTrue(expected.doubleValue() == found.doubleValue());
	}

	/**
	 * Makes sure all the accessors do proper bounds checking
	 */
	@Test
	public void accessorBounds() {
		ImageInterleaved img = createImage(10, 20, 3);

		checkBound(img, "get", 2, null);
		checkBoundBand(img, "getBand", 0, null);
		checkBound(img, "set", 2, null);
		checkBoundBand(img, "setBand", 1, randomNumber());
	}

	// Probes every out-of-bounds (x, y) combination for pixel-level accessors.
	private void checkBound(ImageInterleaved img, String method,
							int type, Object typeData) {
		checkException(img, method, type, typeData, -1, 0);
		checkException(img, method, type, typeData, 0, -1);
		checkException(img, method, type, typeData, img.getWidth(), 0);
		checkException(img, method, type, typeData, 0, img.getHeight());
	}

	// Probes every out-of-bounds (x, y, band) combination for band-level accessors.
	private void checkBoundBand(ImageInterleaved img, String method,
								int type, Object typeData) {
		checkException(img, method, type, typeData, -1, 0, 0);
		checkException(img, method, type, typeData, 0, -1, 0);
		checkException(img, method, type, typeData, img.getWidth(), 0, 0);
		checkException(img, method, type, typeData, 0, img.getHeight(), 0);
		checkException(img, method, type, typeData, 0, 0, img.getNumBands());
	}

	// Asserts that the reflective call throws ImageAccessException for the given coordinates.
	private void checkException(ImageInterleaved img, String method,
								int type, Object typeData, int... where) {
		boolean found = false;
		try {
			call(img, method, type, typeData, where);
		} catch (ImageAccessException e) {
			found = true;
		}

		assertTrue("No exception was thrown", found);
	}

	/**
	 * Invokes {@code method} on the image reflectively.
	 *
	 * {@code type} encodes the extra (non-coordinate) parameter the method takes:
	 * 0 = none (plain getter), 1 = a scalar of the image's data type,
	 * 2 = an array of the image's data type. {@code where} holds the int coordinates.
	 */
	private Object call(ImageInterleaved img, String method,
						int type, Object typeData, int... where) {
		try {
			Class<?>[] paramTypes = type == 0 ?
					new Class<?>[where.length] : new Class<?>[where.length + 1];
			Object[] args = new Object[paramTypes.length];

			int index;

			for (index = 0; index < where.length; index++) {
				paramTypes[index] = int.class;
				args[index] = where[index];
			}

			if (type == 1) {
				paramTypes[index] = img.getDataType();
				args[index] = typeData;
			} else if (type == 2) {
				// Builds the JVM class descriptor of a primitive array, e.g. "[F" for float[].
				// NOTE(review): the uppercased-first-letter trick is wrong for long ("J")
				// and boolean ("Z") — it happens to work for the byte/short/int/float/double
				// types exercised here; confirm before adding interleaved long images.
				String name = "[" + img.getDataType().getName().toUpperCase().charAt(0);
				paramTypes[index] = Class.forName(name);
				args[index] = typeData;
			}

			Method m = img.getClass().getMethod(method, paramTypes);

			return m.invoke(img, args);
		} catch (ClassNotFoundException e) {
			throw new RuntimeException(e);
		} catch (NoSuchMethodException e) {
			fail("The method " + method + " needs to be implemented");
		} catch (IllegalAccessException e) {
			throw new RuntimeException(e);
		} catch (InvocationTargetException e) {
			// Re-throw the accessor's own exception (e.g. ImageAccessException).
			throw (RuntimeException) e.getCause();
		}
		throw new RuntimeException("Shouldn't be here");
	}

	// Builds a pixel array (one random value per band) of the image's data type.
	private Object createPixelArray(ImageInterleaved img) {
		int numBands = img.getNumBands();

		Object ret = Array.newInstance(img.getDataType(), numBands);

		for (int i = 0; i < numBands; i++)
			Array.set(ret, i, randomNumber());

		return ret;
	}

	// Element-wise equality of the first 'length' entries of two primitive arrays.
	private boolean compareArrays(Object a, Object b, int length) {
		for (int i = 0; i < length; i++) {
			Number valA = (Number) Array.get(a, i);
			Number valB = (Number) Array.get(b, i);

			if (!valA.equals(valB))
				return false;
		}
		return true;
	}
}
| |
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2018 by Hitachi Vantara : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.job.entries.copyfiles;
import org.apache.commons.vfs2.NameScope;
import org.pentaho.di.job.entry.validator.AbstractFileValidator;
import org.pentaho.di.job.entry.validator.AndValidator;
import org.pentaho.di.job.entry.validator.JobEntryValidatorUtils;
import java.io.IOException;
import java.util.HashSet;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.Iterator;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.vfs2.FileName;
import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.FileSelectInfo;
import org.apache.commons.vfs2.FileSelector;
import org.apache.commons.vfs2.FileSystemException;
import org.apache.commons.vfs2.FileType;
import org.pentaho.di.cluster.SlaveServer;
import org.pentaho.di.core.CheckResultInterface;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.util.Utils;
import org.pentaho.di.core.Result;
import org.pentaho.di.core.ResultFile;
import org.pentaho.di.core.RowMetaAndData;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleDatabaseException;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleXMLException;
import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.core.vfs.KettleVFS;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.job.Job;
import org.pentaho.di.job.JobMeta;
import org.pentaho.di.job.entry.JobEntryBase;
import org.pentaho.di.job.entry.JobEntryInterface;
import org.pentaho.di.job.entry.validator.ValidatorContext;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.Repository;
import org.pentaho.metastore.api.IMetaStore;
import org.w3c.dom.Node;
/**
* This defines a 'copy files' job entry.
*
* @author Samatar Hassan
* @since 06-05-2007
*/
public class JobEntryCopyFiles extends JobEntryBase implements Cloneable, JobEntryInterface {
private static Class<?> PKG = JobEntryCopyFiles.class; // for i18n purposes, needed by Translator2!!

// Attribute keys used when (de)serializing this entry's settings (XML/repository).
public static final String SOURCE_CONFIGURATION_NAME = "source_configuration_name";
public static final String SOURCE_FILE_FOLDER = "source_filefolder";
public static final String DESTINATION_CONFIGURATION_NAME = "destination_configuration_name";
public static final String DESTINATION_FILE_FOLDER = "destination_filefolder";

// Prefixes used to tag source/destination entries — presumably to distinguish
// local vs. static vs. URL-less paths in the mappings; confirm against the dialog code.
public static final String LOCAL_SOURCE_FILE = "LOCAL-SOURCE-FILE-";
public static final String LOCAL_DEST_FILE = "LOCAL-DEST-FILE-";
public static final String STATIC_SOURCE_FILE = "STATIC-SOURCE-FILE-";
public static final String STATIC_DEST_FILE = "STATIC-DEST-FILE-";
public static final String DEST_URL = "EMPTY_DEST_URL-";
public static final String SOURCE_URL = "EMPTY_SOURCE_URL-";

// Behaviour flags configured for this entry.
public boolean copy_empty_folders;
public boolean arg_from_previous;
public boolean overwrite_files;
public boolean include_subfolders;
public boolean add_result_filesname;
public boolean remove_source_files;
public boolean destination_is_a_file;
public boolean create_destination_folder;

// Parallel arrays: row i describes one source/destination/wildcard triple.
public String[] source_filefolder;
public String[] destination_filefolder;
public String[] wildcard;

// Bookkeeping while the entry executes.
HashSet<String> list_files_remove = new HashSet<>();
HashSet<String> list_add_result = new HashSet<>();
int NbrFail = 0;

private Map<String, String> configurationMappings = new HashMap<>();
public JobEntryCopyFiles( String n ) {
super( n, "" );
copy_empty_folders = true;
arg_from_previous = false;
source_filefolder = null;
remove_source_files = false;
destination_filefolder = null;
wildcard = null;
overwrite_files = false;
include_subfolders = false;
add_result_filesname = false;
destination_is_a_file = false;
create_destination_folder = false;
}
public JobEntryCopyFiles() {
this( "" );
}
public void allocate( int nrFields ) {
source_filefolder = new String[nrFields];
destination_filefolder = new String[nrFields];
wildcard = new String[nrFields];
}
public Object clone() {
JobEntryCopyFiles je = (JobEntryCopyFiles) super.clone();
if ( source_filefolder != null ) {
int nrFields = source_filefolder.length;
je.allocate( nrFields );
System.arraycopy( source_filefolder, 0, je.source_filefolder, 0, nrFields );
System.arraycopy( destination_filefolder, 0, je.destination_filefolder, 0, nrFields );
System.arraycopy( wildcard, 0, je.wildcard, 0, nrFields );
}
return je;
}
public String getXML() {
StringBuilder retval = new StringBuilder( 300 );
retval.append( super.getXML() );
retval.append( " " ).append( XMLHandler.addTagValue( "copy_empty_folders", copy_empty_folders ) );
retval.append( " " ).append( XMLHandler.addTagValue( "arg_from_previous", arg_from_previous ) );
retval.append( " " ).append( XMLHandler.addTagValue( "overwrite_files", overwrite_files ) );
retval.append( " " ).append( XMLHandler.addTagValue( "include_subfolders", include_subfolders ) );
retval.append( " " ).append( XMLHandler.addTagValue( "remove_source_files", remove_source_files ) );
retval.append( " " ).append( XMLHandler.addTagValue( "add_result_filesname", add_result_filesname ) );
retval.append( " " ).append( XMLHandler.addTagValue( "destination_is_a_file", destination_is_a_file ) );
retval.append( " " ).append(
XMLHandler.addTagValue( "create_destination_folder", create_destination_folder ) );
retval.append( " <fields>" ).append( Const.CR );
// Get source and destination files, also wildcard
String[] vsourcefilefolder = preprocessfilefilder( source_filefolder );
String[] vdestinationfilefolder = preprocessfilefilder( destination_filefolder );
if ( source_filefolder != null ) {
for ( int i = 0; i < source_filefolder.length; i++ ) {
retval.append( " <field>" ).append( Const.CR );
saveSource( retval, source_filefolder[i] );
saveDestination( retval, destination_filefolder[i] );
if ( parentJobMeta != null ) {
parentJobMeta.getNamedClusterEmbedManager().registerUrl( vsourcefilefolder[i] );
parentJobMeta.getNamedClusterEmbedManager().registerUrl( vdestinationfilefolder[i] );
}
retval.append( " " ).append( XMLHandler.addTagValue( "wildcard", wildcard[i] ) );
retval.append( " </field>" ).append( Const.CR );
}
}
retval.append( " </fields>" ).append( Const.CR );
return retval.toString();
}
public void loadXML( Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers,
Repository rep, IMetaStore metaStore ) throws KettleXMLException {
try {
super.loadXML( entrynode, databases, slaveServers );
copy_empty_folders = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "copy_empty_folders" ) );
arg_from_previous = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "arg_from_previous" ) );
overwrite_files = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "overwrite_files" ) );
include_subfolders = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "include_subfolders" ) );
remove_source_files = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "remove_source_files" ) );
add_result_filesname = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "add_result_filesname" ) );
destination_is_a_file = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "destination_is_a_file" ) );
create_destination_folder =
"Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "create_destination_folder" ) );
Node fields = XMLHandler.getSubNode( entrynode, "fields" );
// How many field arguments?
int nrFields = XMLHandler.countNodes( fields, "field" );
allocate( nrFields );
// Read them all...
for ( int i = 0; i < nrFields; i++ ) {
Node fnode = XMLHandler.getSubNodeByNr( fields, "field", i );
source_filefolder[i] = loadSource( fnode );
destination_filefolder[i] = loadDestination( fnode );
wildcard[i] = XMLHandler.getTagValue( fnode, "wildcard" );
}
} catch ( KettleXMLException xe ) {
throw new KettleXMLException(
BaseMessages.getString( PKG, "JobCopyFiles.Error.Exception.UnableLoadXML" ), xe );
}
}
protected String loadSource( Node fnode ) {
String source_filefolder = XMLHandler.getTagValue( fnode, SOURCE_FILE_FOLDER );
String ncName = XMLHandler.getTagValue( fnode, SOURCE_CONFIGURATION_NAME );
return loadURL( source_filefolder, ncName, getMetaStore(), configurationMappings );
}
protected String loadDestination( Node fnode ) {
String destination_filefolder = XMLHandler.getTagValue( fnode, DESTINATION_FILE_FOLDER );
String ncName = XMLHandler.getTagValue( fnode, DESTINATION_CONFIGURATION_NAME );
return loadURL( destination_filefolder, ncName, getMetaStore(), configurationMappings );
}
protected void saveSource( StringBuilder retval, String source ) {
String namedCluster = configurationMappings.get( source );
retval.append( " " ).append( XMLHandler.addTagValue( SOURCE_FILE_FOLDER, source ) );
retval.append( " " ).append( XMLHandler.addTagValue( SOURCE_CONFIGURATION_NAME, namedCluster ) );
}
protected void saveDestination( StringBuilder retval, String destination ) {
String namedCluster = configurationMappings.get( destination );
retval.append( " " ).append( XMLHandler.addTagValue( DESTINATION_FILE_FOLDER, destination ) );
retval.append( " " ).append( XMLHandler.addTagValue( DESTINATION_CONFIGURATION_NAME, namedCluster ) );
}
protected String loadSourceRep( Repository rep, ObjectId id_jobentry, int a ) throws KettleException {
String source_filefolder = rep.getJobEntryAttributeString( id_jobentry, a, SOURCE_FILE_FOLDER );
String ncName = rep.getJobEntryAttributeString( id_jobentry, a, SOURCE_CONFIGURATION_NAME );
return loadURL( source_filefolder, ncName, getMetaStore(), configurationMappings );
}
protected String loadDestinationRep( Repository rep, ObjectId id_jobentry, int a ) throws KettleException {
String destination_filefolder = rep.getJobEntryAttributeString( id_jobentry, a, DESTINATION_FILE_FOLDER );
String ncName = rep.getJobEntryAttributeString( id_jobentry, a, DESTINATION_CONFIGURATION_NAME );
return loadURL( destination_filefolder, ncName, getMetaStore(), configurationMappings );
}
protected void saveSourceRep( Repository rep, ObjectId id_job, ObjectId id_jobentry, int i, String value )
throws KettleException {
String namedCluster = configurationMappings.get( value );
rep.saveJobEntryAttribute( id_job, getObjectId(), i, SOURCE_FILE_FOLDER, value );
rep.saveJobEntryAttribute( id_job, id_jobentry, i, SOURCE_CONFIGURATION_NAME, namedCluster );
}
protected void saveDestinationRep( Repository rep, ObjectId id_job, ObjectId id_jobentry, int i, String value )
throws KettleException {
String namedCluster = configurationMappings.get( value );
rep.saveJobEntryAttribute( id_job, getObjectId(), i, DESTINATION_FILE_FOLDER, value );
rep.saveJobEntryAttribute( id_job, id_jobentry, i, DESTINATION_CONFIGURATION_NAME, namedCluster );
}
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
List<SlaveServer> slaveServers ) throws KettleException {
try {
copy_empty_folders = rep.getJobEntryAttributeBoolean( id_jobentry, "copy_empty_folders" );
arg_from_previous = rep.getJobEntryAttributeBoolean( id_jobentry, "arg_from_previous" );
overwrite_files = rep.getJobEntryAttributeBoolean( id_jobentry, "overwrite_files" );
include_subfolders = rep.getJobEntryAttributeBoolean( id_jobentry, "include_subfolders" );
remove_source_files = rep.getJobEntryAttributeBoolean( id_jobentry, "remove_source_files" );
add_result_filesname = rep.getJobEntryAttributeBoolean( id_jobentry, "add_result_filesname" );
destination_is_a_file = rep.getJobEntryAttributeBoolean( id_jobentry, "destination_is_a_file" );
create_destination_folder = rep.getJobEntryAttributeBoolean( id_jobentry, "create_destination_folder" );
// How many arguments?
int argnr = rep.countNrJobEntryAttributes( id_jobentry, "source_filefolder" );
allocate( argnr );
// Read them all...
for ( int a = 0; a < argnr; a++ ) {
source_filefolder[a] = loadSourceRep( rep, id_jobentry, a );
destination_filefolder[a] = loadDestinationRep( rep, id_jobentry, a );
wildcard[a] = rep.getJobEntryAttributeString( id_jobentry, a, "wildcard" );
}
} catch ( KettleException dbe ) {
throw new KettleException( BaseMessages.getString( PKG, "JobCopyFiles.Error.Exception.UnableLoadRep" )
+ id_jobentry, dbe );
}
}
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
try {
rep.saveJobEntryAttribute( id_job, getObjectId(), "copy_empty_folders", copy_empty_folders );
rep.saveJobEntryAttribute( id_job, getObjectId(), "arg_from_previous", arg_from_previous );
rep.saveJobEntryAttribute( id_job, getObjectId(), "overwrite_files", overwrite_files );
rep.saveJobEntryAttribute( id_job, getObjectId(), "include_subfolders", include_subfolders );
rep.saveJobEntryAttribute( id_job, getObjectId(), "remove_source_files", remove_source_files );
rep.saveJobEntryAttribute( id_job, getObjectId(), "add_result_filesname", add_result_filesname );
rep.saveJobEntryAttribute( id_job, getObjectId(), "destination_is_a_file", destination_is_a_file );
rep.saveJobEntryAttribute( id_job, getObjectId(), "create_destination_folder", create_destination_folder );
// save the arguments...
if ( source_filefolder != null ) {
for ( int i = 0; i < source_filefolder.length; i++ ) {
saveSourceRep( rep, id_job, getObjectId(), i, source_filefolder[i] );
saveDestinationRep( rep, id_job, getObjectId(), i, destination_filefolder[i] );
rep.saveJobEntryAttribute( id_job, getObjectId(), i, "wildcard", wildcard[i] );
}
}
} catch ( KettleDatabaseException dbe ) {
throw new KettleException( BaseMessages.getString( PKG, "JobCopyFiles.Error.Exception.UnableSaveRep" )
+ id_job, dbe );
}
}
String[] preprocessfilefilder( String[] folders ) {
List<String> nfolders = new ArrayList<>();
if ( folders != null ) {
for ( int i = 0; i < folders.length; i++ ) {
nfolders.add( folders[ i ].replace( JobEntryCopyFiles.SOURCE_URL + i + "-", "" )
.replace( JobEntryCopyFiles.DEST_URL + i + "-", "" ) );
}
}
return nfolders.toArray( new String[ nfolders.size() ] );
}
public Result execute( Result previousResult, int nr ) throws KettleException {
Result result = previousResult;
List<RowMetaAndData> rows = result.getRows();
RowMetaAndData resultRow = null;
int NbrFail = 0;
NbrFail = 0;
if ( isBasic() ) {
logBasic( BaseMessages.getString( PKG, "JobCopyFiles.Log.Starting" ) );
}
//Set Embedded NamedCluter MetatStore Provider Key so that it can be passed to VFS
if ( parentJobMeta.getNamedClusterEmbedManager() != null ) {
parentJobMeta.getNamedClusterEmbedManager()
.passEmbeddedMetastoreKey( this, parentJobMeta.getEmbeddedMetastoreProviderKey() );
}
try {
// Get source and destination files, also wildcard
String[] vsourcefilefolder = preprocessfilefilder( source_filefolder );
String[] vdestinationfilefolder = preprocessfilefilder( destination_filefolder );
String[] vwildcard = wildcard;
result.setResult( false );
result.setNrErrors( 1 );
if ( arg_from_previous ) {
if ( isDetailed() ) {
logDetailed( BaseMessages.getString( PKG, "JobCopyFiles.Log.ArgFromPrevious.Found", ( rows != null ? rows
.size() : 0 )
+ "" ) );
}
}
if ( arg_from_previous && rows != null ) { // Copy the input row to the (command line) arguments
for ( int iteration = 0; iteration < rows.size() && !parentJob.isStopped(); iteration++ ) {
resultRow = rows.get( iteration );
// Get source and destination file names, also wildcard
String vsourcefilefolder_previous = resultRow.getString( 0, null );
String vdestinationfilefolder_previous = resultRow.getString( 1, null );
String vwildcard_previous = resultRow.getString( 2, null );
if ( !Utils.isEmpty( vsourcefilefolder_previous ) && !Utils.isEmpty( vdestinationfilefolder_previous ) ) {
if ( !processFileFolder( vsourcefilefolder_previous, vdestinationfilefolder_previous, vwildcard_previous,
parentJob, result ) ) {
// The copy process fail
NbrFail++;
}
} else {
if ( isDetailed() ) {
logDetailed( BaseMessages.getString( PKG, "JobCopyFiles.Log.IgnoringRow",
KettleVFS.getFriendlyURI( environmentSubstitute( vsourcefilefolder[ iteration ] ) ),
KettleVFS.getFriendlyURI( environmentSubstitute( vdestinationfilefolder[ iteration ] ) ),
vwildcard[ iteration ] ) );
}
}
}
} else if ( vsourcefilefolder != null && vdestinationfilefolder != null ) {
for ( int i = 0; i < vsourcefilefolder.length && !parentJob.isStopped(); i++ ) {
if ( !Utils.isEmpty( vsourcefilefolder[i] ) && !Utils.isEmpty( vdestinationfilefolder[i] ) ) {
// ok we can process this file/folder
if ( !processFileFolder( vsourcefilefolder[i], vdestinationfilefolder[i], vwildcard[i], parentJob, result ) ) {
// The copy process fail
NbrFail++;
}
} else {
if ( isDetailed() ) {
logDetailed( BaseMessages.getString( PKG, "JobCopyFiles.Log.IgnoringRow",
KettleVFS.getFriendlyURI( environmentSubstitute( vsourcefilefolder[ i ] ) ),
KettleVFS.getFriendlyURI( environmentSubstitute( vdestinationfilefolder[ i ] ) ), vwildcard[ i ] ) );
}
}
}
}
} finally {
list_add_result = null;
list_files_remove = null;
}
// Check if all files was process with success
if ( NbrFail == 0 ) {
result.setResult( true );
result.setNrErrors( 0 );
} else {
result.setNrErrors( NbrFail );
}
return result;
}
boolean processFileFolder( String sourcefilefoldername, String destinationfilefoldername, String wildcard,
Job parentJob, Result result ) {
boolean entrystatus = false;
FileObject sourcefilefolder = null;
FileObject destinationfilefolder = null;
// Clear list files to remove after copy process
// This list is also added to result files name
list_files_remove.clear();
list_add_result.clear();
// Get real source, destination file and wildcard
String realSourceFilefoldername = environmentSubstitute( sourcefilefoldername );
String realDestinationFilefoldername = environmentSubstitute( destinationfilefoldername );
String realWildcard = environmentSubstitute( wildcard );
if ( isDetailed() ) {
logDetailed( BaseMessages.getString( PKG, "JobCopyFiles.Log.ProcessingRow", KettleVFS.getFriendlyURI( realSourceFilefoldername ),
KettleVFS.getFriendlyURI( realDestinationFilefoldername ), realWildcard ) );
}
try {
sourcefilefolder = KettleVFS.getFileObject( realSourceFilefoldername, this );
destinationfilefolder = KettleVFS.getFileObject( realDestinationFilefoldername, this );
if ( sourcefilefolder.exists() ) {
// Check if destination folder/parent folder exists !
// If user wanted and if destination folder does not exist
// PDI will create it
if ( CreateDestinationFolder( destinationfilefolder ) ) {
// Basic Tests
if ( sourcefilefolder.getType().equals( FileType.FOLDER ) && destination_is_a_file ) {
// Source is a folder, destination is a file
// WARNING !!! CAN NOT COPY FOLDER TO FILE !!!
logError( BaseMessages.getString(
PKG, "JobCopyFiles.Log.CanNotCopyFolderToFile", KettleVFS.getFriendlyURI( realSourceFilefoldername ),
KettleVFS.getFriendlyURI( realDestinationFilefoldername ) ) );
NbrFail++;
} else {
if ( destinationfilefolder.getType().equals( FileType.FOLDER )
&& sourcefilefolder.getType().equals( FileType.FILE ) ) {
// Source is a file, destination is a folder
// Copy the file to the destination folder
destinationfilefolder.copyFrom( sourcefilefolder.getParent(), new TextOneFileSelector(
sourcefilefolder.getParent().toString(), sourcefilefolder.getName().getBaseName(),
destinationfilefolder.toString() ) );
if ( isDetailed() ) {
logDetailed( BaseMessages.getString( PKG, "JobCopyFiles.Log.FileCopied", KettleVFS.getFriendlyURI( sourcefilefolder ),
KettleVFS.getFriendlyURI( destinationfilefolder ) ) );
}
} else if ( sourcefilefolder.getType().equals( FileType.FILE ) && destination_is_a_file ) {
// Source is a file, destination is a file
destinationfilefolder.copyFrom( sourcefilefolder, new TextOneToOneFileSelector(
destinationfilefolder ) );
} else {
// Both source and destination are folders
if ( isDetailed() ) {
logDetailed( " " );
logDetailed( BaseMessages.getString( PKG, "JobCopyFiles.Log.FetchFolder", KettleVFS.getFriendlyURI( sourcefilefolder ) ) );
}
TextFileSelector textFileSelector =
new TextFileSelector( sourcefilefolder, destinationfilefolder, realWildcard, parentJob );
try {
destinationfilefolder.copyFrom( sourcefilefolder, textFileSelector );
} finally {
textFileSelector.shutdown();
}
}
// Remove Files if needed
if ( remove_source_files && !list_files_remove.isEmpty() ) {
String sourceFilefoldername = sourcefilefolder.toString();
int trimPathLength = sourceFilefoldername.length() + 1;
FileObject removeFile;
for ( Iterator<String> iter = list_files_remove.iterator(); iter.hasNext() && !parentJob.isStopped(); ) {
String fileremoventry = iter.next();
removeFile = null; // re=null each iteration
// Try to get the file relative to the existing connection
if ( fileremoventry.startsWith( sourceFilefoldername ) ) {
if ( trimPathLength < fileremoventry.length() ) {
removeFile = sourcefilefolder.getChild( fileremoventry.substring( trimPathLength ) );
}
}
// Unable to retrieve file through existing connection; Get the file through a new VFS connection
if ( removeFile == null ) {
removeFile = KettleVFS.getFileObject( fileremoventry, this );
}
// Remove ONLY Files
if ( removeFile.getType() == FileType.FILE ) {
boolean deletefile = removeFile.delete();
logBasic( " ------ " );
if ( !deletefile ) {
logError( " "
+ BaseMessages.getString(
PKG, "JobCopyFiles.Error.Exception.CanRemoveFileFolder", KettleVFS.getFriendlyURI( fileremoventry ) ) );
} else {
if ( isDetailed() ) {
logDetailed( " "
+ BaseMessages.getString( PKG, "JobCopyFiles.Log.FileFolderRemoved", KettleVFS.getFriendlyURI( fileremoventry ) ) );
}
}
}
}
}
// Add files to result files name
if ( add_result_filesname && !list_add_result.isEmpty() ) {
String destinationFilefoldername = destinationfilefolder.toString();
int trimPathLength = destinationFilefoldername.length() + 1;
FileObject addFile;
for ( Iterator<String> iter = list_add_result.iterator(); iter.hasNext(); ) {
String fileaddentry = iter.next();
addFile = null; // re=null each iteration
// Try to get the file relative to the existing connection
if ( fileaddentry.startsWith( destinationFilefoldername ) ) {
if ( trimPathLength < fileaddentry.length() ) {
addFile = destinationfilefolder.getChild( fileaddentry.substring( trimPathLength ) );
}
}
// Unable to retrieve file through existing connection; Get the file through a new VFS connection
if ( addFile == null ) {
addFile = KettleVFS.getFileObject( fileaddentry, this );
}
// Add ONLY Files
if ( addFile.getType() == FileType.FILE ) {
ResultFile resultFile =
new ResultFile( ResultFile.FILE_TYPE_GENERAL, addFile, parentJob.getJobname(), toString() );
result.getResultFiles().put( resultFile.getFile().toString(), resultFile );
if ( isDetailed() ) {
logDetailed( " ------ " );
logDetailed( " "
+ BaseMessages
.getString( PKG, "JobCopyFiles.Log.FileAddedToResultFilesName", KettleVFS.getFriendlyURI( fileaddentry ) ) );
}
}
}
}
}
entrystatus = true;
} else {
// Destination Folder or Parent folder is missing
logError( BaseMessages.getString(
PKG, "JobCopyFiles.Error.DestinationFolderNotFound", KettleVFS.getFriendlyURI( realDestinationFilefoldername ) ) );
}
} else {
logError( BaseMessages.getString( PKG, "JobCopyFiles.Error.SourceFileNotExists", KettleVFS.getFriendlyURI( realSourceFilefoldername ) ) );
}
} catch ( FileSystemException fse ) {
logError( BaseMessages.getString( PKG, "JobCopyFiles.Error.Exception.CopyProcessFileSystemException", fse
.getMessage() ) );
Throwable throwable = fse.getCause();
while ( throwable != null ) {
logError( BaseMessages.getString( PKG, "JobCopyFiles.Log.CausedBy", throwable.getMessage() ) );
throwable = throwable.getCause();
}
} catch ( Exception e ) {
logError( BaseMessages.getString(
PKG, "JobCopyFiles.Error.Exception.CopyProcess", KettleVFS.getFriendlyURI( realSourceFilefoldername ),
KettleVFS.getFriendlyURI( realDestinationFilefoldername ), e.getMessage() ), e );
} finally {
if ( sourcefilefolder != null ) {
try {
sourcefilefolder.close();
sourcefilefolder = null;
} catch ( IOException ex ) { /* Ignore */
}
}
if ( destinationfilefolder != null ) {
try {
destinationfilefolder.close();
destinationfilefolder = null;
} catch ( IOException ex ) { /* Ignore */
}
}
}
return entrystatus;
}
private class TextOneToOneFileSelector implements FileSelector {
FileObject destfile = null;
public TextOneToOneFileSelector( FileObject destinationfile ) {
if ( destinationfile != null ) {
destfile = destinationfile;
}
}
public boolean includeFile( FileSelectInfo info ) {
boolean resultat = false;
String fil_name = null;
try {
// check if the destination file exists
if ( destfile.exists() ) {
if ( isDetailed() ) {
logDetailed( " "
+ BaseMessages.getString( PKG, "JobCopyFiles.Log.FileExists", KettleVFS.getFriendlyURI( destfile ) ) );
}
if ( overwrite_files ) {
if ( isDetailed() ) {
logDetailed( " "
+ BaseMessages.getString( PKG, "JobCopyFiles.Log.FileOverwrite", KettleVFS.getFriendlyURI( destfile ) ) );
}
resultat = true;
}
} else {
if ( isDetailed() ) {
logDetailed( " "
+ BaseMessages.getString( PKG, "JobCopyFiles.Log.FileCopied", KettleVFS.getFriendlyURI( info.getFile() ), KettleVFS.getFriendlyURI( destfile ) ) );
}
resultat = true;
}
if ( resultat && remove_source_files ) {
// add this folder/file to remove files
// This list will be fetched and all entries files
// will be removed
list_files_remove.add( info.getFile().toString() );
}
if ( resultat && add_result_filesname ) {
// add this folder/file to result files name
list_add_result.add( destfile.toString() );
}
} catch ( Exception e ) {
logError( BaseMessages.getString( PKG, "JobCopyFiles.Error.Exception.CopyProcess", KettleVFS.getFriendlyURI( info
.getFile() ), fil_name, e.getMessage() ) );
}
return resultat;
}
public boolean traverseDescendents( FileSelectInfo info ) {
return false;
}
}
private boolean CreateDestinationFolder( FileObject filefolder ) {
FileObject folder = null;
try {
if ( destination_is_a_file ) {
folder = filefolder.getParent();
} else {
folder = filefolder;
}
if ( !folder.exists() ) {
if ( create_destination_folder ) {
if ( isDetailed() ) {
logDetailed( "Folder " + KettleVFS.getFriendlyURI( folder ) + " does not exist !" );
}
folder.createFolder();
if ( isDetailed() ) {
logDetailed( "Folder parent was created." );
}
} else {
logError( "Folder " + KettleVFS.getFriendlyURI( folder ) + " does not exist !" );
return false;
}
}
return true;
} catch ( Exception e ) {
logError( "Couldn't created parent folder " + KettleVFS.getFriendlyURI( folder ), e );
} finally {
if ( folder != null ) {
try {
folder.close();
folder = null;
} catch ( Exception ex ) { /* Ignore */
}
}
}
return false;
}
private class TextFileSelector implements FileSelector {
String fileWildcard = null;
String sourceFolder = null;
String destinationFolder = null;
Job parentjob;
Pattern pattern;
private int traverseCount;
// Store connection to destination source for improved performance to remote hosts
FileObject destinationFolderObject = null;
/**********************************************************
*
* @param selectedfile
* @return True if the selectedfile matches the wildcard
**********************************************************/
private boolean GetFileWildcard( String selectedfile ) {
boolean getIt = true;
// First see if the file matches the regular expression!
if ( pattern != null ) {
Matcher matcher = pattern.matcher( selectedfile );
getIt = matcher.matches();
}
return getIt;
}
public TextFileSelector( FileObject sourcefolderin, FileObject destinationfolderin, String filewildcard,
Job parentJob ) {
if ( sourcefolderin != null ) {
sourceFolder = sourcefolderin.toString();
}
if ( destinationfolderin != null ) {
destinationFolderObject = destinationfolderin;
destinationFolder = destinationFolderObject.toString();
}
if ( !Utils.isEmpty( filewildcard ) ) {
fileWildcard = filewildcard;
pattern = Pattern.compile( fileWildcard );
}
parentjob = parentJob;
}
public boolean includeFile( FileSelectInfo info ) {
boolean returncode = false;
FileObject file_name = null;
String addFileNameString = null;
try {
if ( !info.getFile().toString().equals( sourceFolder ) && !parentjob.isStopped() ) {
// Pass over the Base folder itself
String short_filename = info.getFile().getName().getBaseName();
// Built destination filename
if ( destinationFolderObject == null ) {
// Resolve the destination folder
destinationFolderObject = KettleVFS.getFileObject( destinationFolder, JobEntryCopyFiles.this );
}
String fullName = info.getFile().toString();
String baseFolder = info.getBaseFolder().toString();
String path = fullName.substring( fullName.indexOf( baseFolder ) + baseFolder.length() + 1 );
file_name = destinationFolderObject.resolveFile( path, NameScope.DESCENDENT );
if ( !info.getFile().getParent().equals( info.getBaseFolder() ) ) {
// Not in the Base Folder..Only if include sub folders
if ( include_subfolders ) {
// Folders..only if include subfolders
if ( info.getFile().getType() == FileType.FOLDER ) {
if ( include_subfolders && copy_empty_folders && Utils.isEmpty( fileWildcard ) ) {
if ( ( file_name == null ) || ( !file_name.exists() ) ) {
if ( isDetailed() ) {
logDetailed( " ------ " );
logDetailed( " "
+ BaseMessages.getString( PKG, "JobCopyFiles.Log.FolderCopied", KettleVFS.getFriendlyURI( info
.getFile() ), file_name != null ? KettleVFS.getFriendlyURI( file_name ) : "" ) );
}
returncode = true;
} else {
if ( isDetailed() ) {
logDetailed( " ------ " );
logDetailed( " "
+ BaseMessages.getString( PKG, "JobCopyFiles.Log.FolderExists", KettleVFS.getFriendlyURI( file_name ) ) );
}
if ( overwrite_files ) {
if ( isDetailed() ) {
logDetailed( " "
+ BaseMessages.getString( PKG, "JobCopyFiles.Log.FolderOverwrite", KettleVFS.getFriendlyURI( info
.getFile() ), KettleVFS.getFriendlyURI( file_name ) ) );
}
returncode = true;
}
}
}
} else {
if ( GetFileWildcard( short_filename ) ) {
// Check if the file exists
if ( ( file_name == null ) || ( !file_name.exists() ) ) {
if ( isDetailed() ) {
logDetailed( " ------ " );
logDetailed( " "
+ BaseMessages.getString(
PKG, "JobCopyFiles.Log.FileCopied", KettleVFS.getFriendlyURI( info.getFile() ), file_name != null
? KettleVFS.getFriendlyURI( file_name ) : "" ) );
}
returncode = true;
} else {
if ( isDetailed() ) {
logDetailed( " ------ " );
logDetailed( " "
+ BaseMessages.getString( PKG, "JobCopyFiles.Log.FileExists", KettleVFS.getFriendlyURI( file_name ) ) );
}
if ( overwrite_files ) {
if ( isDetailed() ) {
logDetailed( " "
+ BaseMessages.getString( PKG, "JobCopyFiles.Log.FileExists", KettleVFS.getFriendlyURI( info
.getFile() ), KettleVFS.getFriendlyURI( file_name ) ) );
}
returncode = true;
}
}
}
}
}
} else {
// In the Base Folder...
// Folders..only if include subfolders
if ( info.getFile().getType() == FileType.FOLDER ) {
if ( include_subfolders && copy_empty_folders && Utils.isEmpty( fileWildcard ) ) {
if ( ( file_name == null ) || ( !file_name.exists() ) ) {
if ( isDetailed() ) {
logDetailed( "", " ------ " );
logDetailed( " "
+ BaseMessages.getString(
PKG, "JobCopyFiles.Log.FolderCopied", KettleVFS.getFriendlyURI( info.getFile() ), file_name != null
? KettleVFS.getFriendlyURI( file_name ) : "" ) );
}
returncode = true;
} else {
if ( isDetailed() ) {
logDetailed( " ------ " );
logDetailed( " "
+ BaseMessages.getString( PKG, "JobCopyFiles.Log.FolderExists", KettleVFS.getFriendlyURI( file_name ) ) );
}
if ( overwrite_files ) {
if ( isDetailed() ) {
logDetailed( " "
+ BaseMessages.getString( PKG, "JobCopyFiles.Log.FolderOverwrite", KettleVFS.getFriendlyURI( info
.getFile() ), KettleVFS.getFriendlyURI( file_name ) ) );
}
returncode = true;
}
}
}
} else {
// file...Check if exists
file_name = KettleVFS.getFileObject( destinationFolder + Const.FILE_SEPARATOR + short_filename );
if ( GetFileWildcard( short_filename ) ) {
if ( ( file_name == null ) || ( !file_name.exists() ) ) {
if ( isDetailed() ) {
logDetailed( " ------ " );
logDetailed( " "
+ BaseMessages.getString(
PKG, "JobCopyFiles.Log.FileCopied", KettleVFS.getFriendlyURI( info.getFile() ), file_name != null
? KettleVFS.getFriendlyURI( file_name ) : "" ) );
}
returncode = true;
} else {
if ( isDetailed() ) {
logDetailed( " ------ " );
logDetailed( " "
+ BaseMessages.getString( PKG, "JobCopyFiles.Log.FileExists", KettleVFS.getFriendlyURI( file_name ) ) );
}
if ( overwrite_files ) {
if ( isDetailed() ) {
logDetailed(
" " + BaseMessages.getString( PKG, "JobCopyFiles.Log.FileExistsInfos" ),
BaseMessages.getString(
PKG, "JobCopyFiles.Log.FileExists", KettleVFS.getFriendlyURI( info.getFile() ), KettleVFS.getFriendlyURI( file_name ) ) );
}
returncode = true;
}
}
}
}
}
}
} catch ( Exception e ) {
logError( BaseMessages.getString( PKG, "JobCopyFiles.Error.Exception.CopyProcess", KettleVFS.getFriendlyURI( info
.getFile() ), file_name != null ? KettleVFS.getFriendlyURI( file_name ) : null, e.getMessage() ) );
returncode = false;
} finally {
if ( file_name != null ) {
try {
if ( returncode && add_result_filesname ) {
addFileNameString = file_name.toString();
}
file_name.close();
file_name = null;
} catch ( IOException ex ) { /* Ignore */
}
}
}
if ( returncode && remove_source_files ) {
// add this folder/file to remove files
// This list will be fetched and all entries files
// will be removed
list_files_remove.add( info.getFile().toString() );
}
if ( returncode && add_result_filesname ) {
// add this folder/file to result files name
list_add_result.add( addFileNameString ); // was a NPE before with the file_name=null above in the finally
}
return returncode;
}
public boolean traverseDescendents( FileSelectInfo info ) {
return ( traverseCount++ == 0 || include_subfolders );
}
public void shutdown() {
if ( destinationFolderObject != null ) {
try {
destinationFolderObject.close();
} catch ( IOException ex ) { /* Ignore */
}
}
}
}
/**
 * File selector that matches exactly one named file inside one source folder.
 *
 * A file is included when its base name equals the configured file name AND its
 * parent folder equals the configured source folder. Whether an already-existing
 * destination file is overwritten depends on the job entry's overwrite_files flag.
 * Matched files are also recorded in the remove/result lists when the
 * corresponding job-entry flags are set.
 */
private class TextOneFileSelector implements FileSelector {
  String filename = null;
  String foldername = null;
  String destfolder = null;
  private int traverseCount;

  /**
   * @param sourcefolderin   folder the single file must live in (ignored when empty)
   * @param sourcefilenamein base name of the single file to copy (ignored when empty)
   * @param destfolderin     destination folder the file will be copied to (ignored when empty)
   */
  public TextOneFileSelector( String sourcefolderin, String sourcefilenamein, String destfolderin ) {
    if ( !Utils.isEmpty( sourcefilenamein ) ) {
      filename = sourcefilenamein;
    }
    if ( !Utils.isEmpty( sourcefolderin ) ) {
      foldername = sourcefolderin;
    }
    if ( !Utils.isEmpty( destfolderin ) ) {
      destfolder = destfolderin;
    }
  }

  /**
   * Decides whether the visited file is the single configured source file and,
   * if so, whether it may be copied (destination missing, or overwrite enabled).
   *
   * @param info the file currently visited by the VFS walker
   * @return true when the file should be copied
   */
  public boolean includeFile( FileSelectInfo info ) {
    boolean resultat = false;
    String fil_name = null;
    try {
      if ( info.getFile().getType() == FileType.FILE ) {
        if ( info.getFile().getName().getBaseName().equals( filename )
          && ( info.getFile().getParent().toString().equals( foldername ) ) ) {
          // check if the file exists at the destination
          fil_name = destfolder + Const.FILE_SEPARATOR + filename;
          if ( KettleVFS.getFileObject( fil_name, JobEntryCopyFiles.this ).exists() ) {
            if ( isDetailed() ) {
              logDetailed( "      " + BaseMessages.getString( PKG, "JobCopyFiles.Log.FileExists", KettleVFS.getFriendlyURI( fil_name ) ) );
            }
            // Destination already present: only copy when overwriting is allowed.
            if ( overwrite_files ) {
              if ( isDetailed() ) {
                logDetailed( "      "
                  + BaseMessages.getString(
                    PKG, "JobCopyFiles.Log.FileOverwrite", KettleVFS.getFriendlyURI( info.getFile() ), KettleVFS.getFriendlyURI( fil_name ) ) );
              }
              resultat = true;
            }
          } else {
            if ( isDetailed() ) {
              logDetailed( "      "
                + BaseMessages.getString(
                  PKG, "JobCopyFiles.Log.FileCopied", KettleVFS.getFriendlyURI( info.getFile() ), KettleVFS.getFriendlyURI( fil_name ) ) );
            }
            resultat = true;
          }
        }
        if ( resultat && remove_source_files ) {
          // add this folder/file to remove files
          // This list will be fetched and all entries files
          // will be removed
          list_files_remove.add( info.getFile().toString() );
        }
        if ( resultat && add_result_filesname ) {
          // add this folder/file to result files name
          list_add_result.add( KettleVFS.getFileObject( fil_name, JobEntryCopyFiles.this ).toString() );
        }
      }
    } catch ( Exception e ) {
      // fil_name may still be null if the exception was raised before it was
      // assigned (e.g. in getType()); guard it like the sibling selector does
      // instead of passing a possibly-null value straight to getFriendlyURI().
      logError( BaseMessages.getString( PKG, "JobCopyFiles.Error.Exception.CopyProcess", KettleVFS.getFriendlyURI( info
        .getFile() ), fil_name != null ? KettleVFS.getFriendlyURI( fil_name ) : null, e.getMessage() ) );
      resultat = false;
    }
    return resultat;
  }

  /**
   * Descend into the root folder always; deeper folders only when subfolder
   * traversal is enabled. The counter increments on every visit.
   */
  public boolean traverseDescendents( FileSelectInfo info ) {
    return ( traverseCount++ == 0 || include_subfolders );
  }
}
/** Sets whether empty folders are copied as well. */
public void setCopyEmptyFolders( boolean copy_empty_foldersin ) {
  this.copy_empty_folders = copy_empty_foldersin;
}
/** @return true when empty folders are copied as well. */
public boolean isCopyEmptyFolders() {
  return copy_empty_folders;
}
/** Sets whether existing destination files are overwritten. (Non-standard method name kept for compatibility.) */
public void setoverwrite_files( boolean overwrite_filesin ) {
  this.overwrite_files = overwrite_filesin;
}
/** @return true when existing destination files are overwritten. (Non-standard method name kept for compatibility.) */
public boolean isoverwrite_files() {
  return overwrite_files;
}
/** Sets whether subfolders are traversed during the copy. */
public void setIncludeSubfolders( boolean include_subfoldersin ) {
  this.include_subfolders = include_subfoldersin;
}
/** @return true when subfolders are traversed during the copy. */
public boolean isIncludeSubfolders() {
  return include_subfolders;
}
/** Sets whether copied files are added to the job's result-files list. */
public void setAddresultfilesname( boolean add_result_filesnamein ) {
  this.add_result_filesname = add_result_filesnamein;
}
/** @return true when copied files are added to the job's result-files list. */
public boolean isAddresultfilesname() {
  return add_result_filesname;
}
/** Sets whether the source/destination arguments come from the previous job entry's result rows. */
public void setArgFromPrevious( boolean argfrompreviousin ) {
  this.arg_from_previous = argfrompreviousin;
}
/** @return true when the arguments come from the previous job entry's result rows. */
public boolean isArgFromPrevious() {
  return arg_from_previous;
}
/** Sets whether source files are removed after a successful copy (move semantics). */
public void setRemoveSourceFiles( boolean remove_source_filesin ) {
  this.remove_source_files = remove_source_filesin;
}
/** @return true when source files are removed after a successful copy. */
public boolean isRemoveSourceFiles() {
  return remove_source_files;
}
/** Sets whether the destination path denotes a single file rather than a folder. */
public void setDestinationIsAFile( boolean destination_is_a_file ) {
  this.destination_is_a_file = destination_is_a_file;
}
/** @return true when the destination path denotes a single file rather than a folder. */
public boolean isDestinationIsAFile() {
  return destination_is_a_file;
}
/** Sets whether a missing destination folder is created before copying. */
public void setCreateDestinationFolder( boolean create_destination_folder ) {
  this.create_destination_folder = create_destination_folder;
}
/** @return true when a missing destination folder is created before copying. */
public boolean isCreateDestinationFolder() {
  return create_destination_folder;
}
/**
 * Validates this job entry's configuration and appends any problems to {@code remarks}.
 *
 * First checks that the "arguments" property is non-null; if that fails, no further
 * per-argument validation is attempted. Otherwise each source file/folder argument is
 * checked for non-nullness and existence, with variable substitution applied.
 */
public void check( List<CheckResultInterface> remarks, JobMeta jobMeta, VariableSpace space,
  Repository repository, IMetaStore metaStore ) {
  boolean res = JobEntryValidatorUtils.andValidator().validate( this, "arguments", remarks, AndValidator.putValidators( JobEntryValidatorUtils.notNullValidator() ) );
  if ( !res ) {
    // Without the arguments array there is nothing further to validate.
    return;
  }
  ValidatorContext ctx = new ValidatorContext();
  // Make the job's variable space available so ${...} references resolve during validation.
  AbstractFileValidator.putVariableSpace( ctx, getVariables() );
  AndValidator.putValidators( ctx, JobEntryValidatorUtils.notNullValidator(), JobEntryValidatorUtils.fileExistsValidator() );
  for ( int i = 0; i < source_filefolder.length; i++ ) {
    JobEntryValidatorUtils.andValidator().validate( this, "arguments[" + i + "]", remarks, ctx );
  }
}
/** @return true: this job entry produces a success/failure result that following entries can evaluate. */
public boolean evaluates() {
  return true;
}
/**
 * Records a URL-to-configuration-name mapping and returns the URL unchanged.
 *
 * The mapping is stored only when both the URL and the name are non-empty;
 * the {@code metastore} parameter is accepted for interface compatibility
 * but not used here.
 *
 * @param url      the URL to map (also the return value)
 * @param ncName   the named-cluster/configuration name to associate with the URL
 * @param metastore unused
 * @param mappings map that receives the url -&gt; name association
 * @return the {@code url} argument, always unchanged
 */
public String loadURL( String url, String ncName, IMetaStore metastore, Map<String, String> mappings ) {
  final boolean bothPresent = !Utils.isEmpty( ncName ) && !Utils.isEmpty( url );
  if ( bothPresent ) {
    mappings.put( url, ncName );
  }
  return url;
}
/** Replaces the URL-to-configuration-name mapping table. */
public void setConfigurationMappings( Map<String, String> mappings ) {
  this.configurationMappings = mappings;
}
/** @return the configuration name mapped to {@code url}, or null when no mapping exists. */
public String getConfigurationBy( String url ) {
  return this.configurationMappings.get( url );
}
/**
 * Extracts the path portion of a VFS URL, tolerating Kettle variable references.
 *
 * Variable syntax characters ($, {, }) are replaced by '/' only so that the VFS
 * manager can parse the URI; since replaceAll substitutes one character for one
 * character, the sanitized string has the same length as the original, which is
 * what makes the substring index below valid against {@code incomingURL}.
 *
 * @param incomingURL the URL to strip, possibly containing ${...} variables
 * @return the path part of the original (unsanitized) URL, or null when it cannot be parsed
 */
public String getUrlPath( String incomingURL ) {
  String path = null;
  try {
    String noVariablesURL = incomingURL.replaceAll( "[${}]", "/" );
    FileName fileName = KettleVFS.getInstance().getFileSystemManager().resolveURI( noVariablesURL );
    String root = fileName.getRootURI();
    // root.length() - 1 keeps the leading '/' of the path; indexes into the
    // ORIGINAL string so variable references survive in the returned path.
    path = incomingURL.substring( root.length() - 1 );
  } catch ( FileSystemException e ) {
    // Unparseable URL: signal with null rather than throwing.
    path = null;
  }
  return path;
}
}
| |
/**********************************************************************************
* $URL$
* $Id$
***********************************************************************************
*
* Copyright (c) 2003, 2004, 2005, 2006, 2007, 2008 Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.sakaiproject.component.cover;
import org.sakaiproject.component.cover.ComponentManager;
import java.util.List;
import java.util.Map;
/**
* <p>
* ServerConfigurationService is a static Cover for the {@link org.sakaiproject.component.api.ServerConfigurationService ServerConfigurationService}; see that interface for usage details.
* </p>
*
* @version $Revision$
*/
/**
 * Static cover for the {@link org.sakaiproject.component.api.ServerConfigurationService}
 * component. Every call is forwarded to the component instance obtained from the
 * {@link ComponentManager}; when the component cannot be resolved, a neutral default
 * (null, false or 0) is returned instead of throwing.
 */
public class ServerConfigurationService
{
	public final static String CURRENT_SERVER_URL = org.sakaiproject.component.api.ServerConfigurationService.CURRENT_SERVER_URL;

	public final static String CURRENT_PORTAL_PATH = org.sakaiproject.component.api.ServerConfigurationService.CURRENT_PORTAL_PATH;

	/**
	 * Access the component instance: special cover only method.
	 *
	 * @return the component instance (may be null when the component manager cannot resolve it).
	 */
	public static org.sakaiproject.component.api.ServerConfigurationService getInstance()
	{
		if (!ComponentManager.CACHE_COMPONENTS)
		{
			// Caching disabled: look the component up on every call.
			return (org.sakaiproject.component.api.ServerConfigurationService) ComponentManager
					.get(org.sakaiproject.component.api.ServerConfigurationService.class);
		}
		// Caching enabled: resolve once, then reuse the cached reference.
		if (m_instance == null)
		{
			m_instance = (org.sakaiproject.component.api.ServerConfigurationService) ComponentManager
					.get(org.sakaiproject.component.api.ServerConfigurationService.class);
		}
		return m_instance;
	}

	/** Cached component instance; only consulted when ComponentManager.CACHE_COMPONENTS is set. */
	private static org.sakaiproject.component.api.ServerConfigurationService m_instance = null;

	public static java.lang.String SERVICE_NAME = org.sakaiproject.component.api.ServerConfigurationService.SERVICE_NAME;

	public static java.lang.String getServerId()
	{
		final org.sakaiproject.component.api.ServerConfigurationService scs = getInstance();
		return (scs == null) ? null : scs.getServerId();
	}

	public static java.lang.String getServerInstance()
	{
		final org.sakaiproject.component.api.ServerConfigurationService scs = getInstance();
		return (scs == null) ? null : scs.getServerInstance();
	}

	public static java.lang.String getServerIdInstance()
	{
		final org.sakaiproject.component.api.ServerConfigurationService scs = getInstance();
		return (scs == null) ? null : scs.getServerIdInstance();
	}

	public static java.lang.String getServerName()
	{
		final org.sakaiproject.component.api.ServerConfigurationService scs = getInstance();
		return (scs == null) ? null : scs.getServerName();
	}

	public static java.lang.String getServerUrl()
	{
		final org.sakaiproject.component.api.ServerConfigurationService scs = getInstance();
		return (scs == null) ? null : scs.getServerUrl();
	}

	public static java.lang.String getAccessUrl()
	{
		final org.sakaiproject.component.api.ServerConfigurationService scs = getInstance();
		return (scs == null) ? null : scs.getAccessUrl();
	}

	public static java.lang.String getAccessPath()
	{
		final org.sakaiproject.component.api.ServerConfigurationService scs = getInstance();
		return (scs == null) ? null : scs.getAccessPath();
	}

	public static java.lang.String getHelpUrl(java.lang.String param0)
	{
		final org.sakaiproject.component.api.ServerConfigurationService scs = getInstance();
		return (scs == null) ? null : scs.getHelpUrl(param0);
	}

	public static java.lang.String getPortalUrl()
	{
		final org.sakaiproject.component.api.ServerConfigurationService scs = getInstance();
		return (scs == null) ? null : scs.getPortalUrl();
	}

	public static java.lang.String getToolUrl()
	{
		final org.sakaiproject.component.api.ServerConfigurationService scs = getInstance();
		return (scs == null) ? null : scs.getToolUrl();
	}

	public static java.lang.String getGatewaySiteId()
	{
		final org.sakaiproject.component.api.ServerConfigurationService scs = getInstance();
		return (scs == null) ? null : scs.getGatewaySiteId();
	}

	public static java.lang.String getLoggedOutUrl()
	{
		final org.sakaiproject.component.api.ServerConfigurationService scs = getInstance();
		return (scs == null) ? null : scs.getLoggedOutUrl();
	}

	public static java.lang.String getUserHomeUrl()
	{
		final org.sakaiproject.component.api.ServerConfigurationService scs = getInstance();
		return (scs == null) ? null : scs.getUserHomeUrl();
	}

	public static java.lang.String getSakaiHomePath()
	{
		final org.sakaiproject.component.api.ServerConfigurationService scs = getInstance();
		return (scs == null) ? null : scs.getSakaiHomePath();
	}

	public static boolean getBoolean(java.lang.String param0, boolean param1)
	{
		final org.sakaiproject.component.api.ServerConfigurationService scs = getInstance();
		return (scs == null) ? false : scs.getBoolean(param0, param1);
	}

	public static java.lang.String getString(java.lang.String param0, java.lang.String param1)
	{
		final org.sakaiproject.component.api.ServerConfigurationService scs = getInstance();
		return (scs == null) ? null : scs.getString(param0, param1);
	}

	public static java.lang.String getString(java.lang.String param0)
	{
		final org.sakaiproject.component.api.ServerConfigurationService scs = getInstance();
		return (scs == null) ? null : scs.getString(param0);
	}

	public static java.lang.String[] getStrings(java.lang.String param0)
	{
		final org.sakaiproject.component.api.ServerConfigurationService scs = getInstance();
		return (scs == null) ? null : scs.getStrings(param0);
	}

	public static java.util.List getToolOrder(java.lang.String param0)
	{
		final org.sakaiproject.component.api.ServerConfigurationService scs = getInstance();
		return (scs == null) ? null : scs.getToolOrder(param0);
	}

	/**
	 * Access the list of required tools for the given category (site type).
	 *
	 * @param param0
	 *        The tool category.
	 * @return A list of tool ids (String), or an empty list if there are none for this category
	 *         (null when the service is unavailable).
	 */
	public static java.util.List getToolsRequired(java.lang.String param0)
	{
		final org.sakaiproject.component.api.ServerConfigurationService scs = getInstance();
		return (scs == null) ? null : scs.getToolsRequired(param0);
	}

	/**
	 * Access the list of groups by category (site type).
	 *
	 * @param param0
	 *        The tool category.
	 * @return An ordered list of group ids (String), or an empty list if there are none for this
	 *         category (null when the service is unavailable).
	 */
	public static java.util.List getCategoryGroups(java.lang.String param0)
	{
		final org.sakaiproject.component.api.ServerConfigurationService scs = getInstance();
		return (scs == null) ? null : scs.getCategoryGroups(param0);
	}

	/**
	 * Returns true if the selected tool is contained in the pre-initialized list of selected items.
	 *
	 * @param param0 group name
	 * @param param1 id of the selected tool
	 */
	public static boolean toolGroupIsSelected(String param0,String param1) {
		final org.sakaiproject.component.api.ServerConfigurationService scs = getInstance();
		return (scs == null) ? false : scs.toolGroupIsSelected(param0,param1);
	}

	/**
	 * Returns true if the selected tool is contained in the pre-initialized list of required items.
	 *
	 * @param param0 group name
	 * @param param1 id of the selected tool
	 */
	public static boolean toolGroupIsRequired(String param0, String param1) {
		final org.sakaiproject.component.api.ServerConfigurationService scs = getInstance();
		return (scs == null) ? false : scs.toolGroupIsRequired(param0,param1);
	}

	public static java.util.List getToolGroup(java.lang.String param0)
	{
		final org.sakaiproject.component.api.ServerConfigurationService scs = getInstance();
		return (scs == null) ? null : scs.getToolGroup(param0);
	}

	public static java.util.List getDefaultTools(java.lang.String param0)
	{
		final org.sakaiproject.component.api.ServerConfigurationService scs = getInstance();
		return (scs == null) ? null : scs.getDefaultTools(param0);
	}

	public static int getInt(java.lang.String param0, int param1)
	{
		final org.sakaiproject.component.api.ServerConfigurationService scs = getInstance();
		return (scs == null) ? 0 : scs.getInt(param0, param1);
	}

	/**
	 * Access the list of tool categories for the given site type.
	 *
	 * @param category the site type
	 * @return a list of tool category ids in order (null when the service is unavailable)
	 */
	public static List<String> getToolCategories(String category)
	{
		final org.sakaiproject.component.api.ServerConfigurationService scs = getInstance();
		return (scs == null) ? null : scs.getToolCategories(category);
	}

	/**
	 * Access the map of tool categories to tool ids for this site type.
	 *
	 * @param category the site type
	 * @return a map of tool category ids to tool ids (null when the service is unavailable)
	 */
	public static Map<String, List<String>> getToolCategoriesAsMap(String category)
	{
		final org.sakaiproject.component.api.ServerConfigurationService scs = getInstance();
		return (scs == null) ? null : scs.getToolCategoriesAsMap(category);
	}

	/**
	 * Access a map of tool id to tool category id for this site type.
	 *
	 * @param category the site type
	 * @return map with tool id as key and category id as value (null when the service is unavailable)
	 */
	public static Map<String, String> getToolToCategoryMap(String category)
	{
		final org.sakaiproject.component.api.ServerConfigurationService scs = getInstance();
		return (scs == null) ? null : scs.getToolToCategoryMap(category);
	}
}
| |
/*
* Copyright 2009 Salomo Petrus
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package nl.tranquilizedquality.itest.cargo;
import java.io.File;
import java.io.IOException;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import nl.tranquilizedquality.itest.cargo.exception.ConfigurationException;
import nl.tranquilizedquality.itest.cargo.exception.DeployException;
import nl.tranquilizedquality.itest.domain.DeployableLocationConfiguration;
import org.apache.commons.io.FileUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.codehaus.cargo.container.ContainerType;
import org.codehaus.cargo.container.InstalledLocalContainer;
import org.codehaus.cargo.container.configuration.ConfigurationType;
import org.codehaus.cargo.container.configuration.LocalConfiguration;
import org.codehaus.cargo.container.deployable.Deployable;
import org.codehaus.cargo.container.deployable.DeployableType;
import org.codehaus.cargo.container.property.GeneralPropertySet;
import org.codehaus.cargo.container.property.ServletPropertySet;
import org.codehaus.cargo.container.tomcat.TomcatPropertySet;
import org.codehaus.cargo.generic.DefaultContainerFactory;
import org.codehaus.cargo.generic.configuration.ConfigurationFactory;
import org.codehaus.cargo.generic.configuration.DefaultConfigurationFactory;
import org.codehaus.cargo.generic.deployable.DefaultDeployableFactory;
import org.codehaus.cargo.util.log.FileLogger;
import org.codehaus.cargo.util.log.LogLevel;
import org.codehaus.cargo.util.log.Logger;
import org.springframework.beans.factory.annotation.Required;
/**
* AbstractTomcatContainerUtil is an implementation of {@link ContainerUtil}
* which managaes a Tomcat servlet container. It can configure, start and stop
* the Tomcat servlet container.
*
* @author Salomo Petrus
* @author Enric Ballo
*
*/
public abstract class AbstractTomcatContainerUtil extends AbstractInstalledContainerUtil {

    /** Logger for this class */
    private static final Log LOGGER = LogFactory.getLog(AbstractTomcatContainerUtil.class);

    /**
     * The AJP (Apache JServ Protocol) port may be used by a web server (such as
     * the Apache httpd server) to communicate with Tomcat. This port is also
     * used if you set up a load-balanced server. Use the property
     * ${cargo.server.ajp.port} to set the port dynamically and set the system
     * properties with this value.
     *
     * Default value for Tomcat: 8009
     */
    protected Integer ajpPort;

    /**
     * The port to use when communicating with this server, for example to start
     * and stop it
     *
     * Default value for Tomcat: 8005
     */
    protected Integer rmiPort;

    // Cargo container id for the Tomcat flavor to run (presumably e.g. "tomcat6x";
    // TODO confirm against the cargo container-id list used by the project).
    protected String tomcatVersion;

    /**
     * Default constructor that will detect which OS is used to make sure the
     * Tomcat will be downloaded in the correct location.
     */
    public AbstractTomcatContainerUtil() {
        setContainerName("Tomcat");
        setupContainerHome();
    }

    /**
     * Installs the container and the application configuration. It also sets
     * some system properties so the container can startup properly. Finally it
     * sets up additional configuration like jndi.proprties files etc.
     */
    @Override
    protected void setupContainer() {
        /*
         * Execute default setup behavior.
         */
        super.setupContainer();
        // Expose the Tomcat-specific ports as system properties so the deployed
        // applications / cargo configuration can pick them up.
        systemProperties.put(TomcatPropertySet.AJP_PORT, ajpPort.toString());
        systemProperties.put(GeneralPropertySet.RMI_PORT, rmiPort.toString());
        setupConfiguration();
    }

    /**
     * Configures the cargo container, registers all deployables and starts
     * Tomcat. Blocks until the container reports it is up (or the deploy
     * timeout elapses).
     */
    @Override
    protected void deploy() {
        // create configuration factory
        final ConfigurationFactory configurationFactory = new DefaultConfigurationFactory();
        // create Tomcat configuration based on the existing container home
        final LocalConfiguration configuration = (LocalConfiguration) configurationFactory.createConfiguration(tomcatVersion,
                ContainerType.INSTALLED, ConfigurationType.EXISTING, containerHome);
        // setup configuration: join all JVM arguments into one space-separated string
        final StringBuilder args = new StringBuilder();
        for (final String arg : jvmArguments) {
            args.append(arg);
            args.append(" ");
            if (LOGGER.isInfoEnabled()) {
                LOGGER.info("Added JVM argument: " + arg);
            }
        }
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("== CONFIGURATION PROPERTIES ==");
            LOGGER.debug("CONTAINER PORT : " + containerPort);
            LOGGER.debug("AJP PORT       : " + ajpPort);
            LOGGER.debug("RMI PORT       : " + rmiPort);
        }
        configuration.setProperty(GeneralPropertySet.JVMARGS, args.toString());
        configuration.setProperty(ServletPropertySet.PORT, containerPort.toString());
        configuration.setProperty(TomcatPropertySet.AJP_PORT, ajpPort.toString());
        configuration.setProperty(GeneralPropertySet.RMI_PORT, rmiPort.toString());
        /*
         * Iterate over all available deployable locations.
         * Keys are paths and values are deployable type names — presumably;
         * confirm against AbstractInstalledContainerUtil.
         */
        final Set<Entry<String, String>> entrySet = deployableLocations.entrySet();
        final Iterator<Entry<String, String>> iterator = entrySet.iterator();
        while (iterator.hasNext()) {
            final Entry<String, String> entry = iterator.next();
            final String key = entry.getKey();
            final String value = entry.getValue();
            DeployableType deployableType = null;
            /*
             * Determine the deployable type.
             */
            deployableType = determineDeployableType(value);
            /*
             * Add the deployable.
             */
            addDeployable(configuration, key, deployableType);
        }
        /*
         * Iterate over all available deployable location configurations.
         */
        for (final DeployableLocationConfiguration config : deployableLocationConfigurations) {
            final String contextName = config.getContextName();
            final String type = config.getType();
            String path = config.getPath();
            /*
             * Determine deployable type.
             */
            DeployableType deployableType = null;
            if (contextName != null && contextName.length() > 0) {
                deployableType = determineDeployableType(type);
                if (DeployableType.WAR.equals(deployableType)) {
                    // A WAR with an explicit context name is copied to
                    // target/<contextName>.war so Tomcat derives the desired
                    // context path from the file name.
                    final File srcFile = new File(path);
                    final File destFile = new File("target/" + contextName + ".war");
                    try {
                        FileUtils.copyFile(srcFile, destFile);
                    } catch (final IOException e) {
                        throw new DeployException("Failed to copy WAR file: " + path, e);
                    }
                    path = destFile.getPath();
                }
            } else {
                deployableType = determineDeployableType(type);
            }
            /*
             * Add the deployable
             */
            addDeployable(configuration, path, deployableType);
        }
        // create installedLocalContainer
        installedLocalContainer = (InstalledLocalContainer) new DefaultContainerFactory().createContainer(tomcatVersion,
                ContainerType.INSTALLED, configuration);
        // configure installedLocalContainer
        installedLocalContainer.setHome(containerHome);
        // Route cargo's own logging to a debug-level file log.
        final Logger fileLogger = new FileLogger(new File(cargoLogFilePath + "cargo.log"), true);
        fileLogger.setLevel(LogLevel.DEBUG);
        installedLocalContainer.setLogger(fileLogger);
        installedLocalContainer.setOutput(cargoLogFilePath + "output.log");
        if (deployTimeOut != null) {
            installedLocalContainer.setTimeout(deployTimeOut);
        }
        // set the system properties
        installedLocalContainer.setSystemProperties(systemProperties);
        if (LOGGER.isInfoEnabled()) {
            LOGGER.info("Starting Tomcat ...");
        }
        // startup installedLocalContainer
        installedLocalContainer.start();
        // Here you are assured the container is started.
        if (LOGGER.isInfoEnabled()) {
            LOGGER.info("Tomcat up and running!");
        }
    }

    /**
     * @return the deployableLocations
     */
    public Map<String, String> getDeployableLocations() {
        return deployableLocations;
    }

    /**
     * Determines the type of deployable.
     *
     * @param type
     *            A string representation of the deployable type.
     * @return Returns a {@link DeployableType} that corresponds to the string
     *         representation; unknown types default to WAR. EAR and EJB are
     *         rejected because Tomcat cannot host them.
     */
    private DeployableType determineDeployableType(final String type) {
        DeployableType deployableType;
        /*
         * Check what kind of deployable it is.
         */
        if ("EAR".equals(type)) {
            throw new DeployException("Tomcat doesn't support EAR files!");
        } else if ("WAR".equals(type)) {
            deployableType = DeployableType.WAR;
        } else if ("EJB".equals(type)) {
            throw new DeployException("Tomcat doesn't support EJB files!");
        } else {
            // Default value is WAR file
            deployableType = DeployableType.WAR;
        }
        return deployableType;
    }

    /**
     * Adds a deployable to the {@link LocalConfiguration}.
     *
     * @param configuration
     *            The configuration where a deployable can be added to.
     * @param path
     *            The path where the deployable can be found.
     * @param deployableType
     *            The type of deployable.
     */
    private void addDeployable(final LocalConfiguration configuration, final String path, final DeployableType deployableType) {
        // retrieve deployable file
        // NOTE(review): the container id passed here is "jetty" although this
        // utility manages Tomcat — verify whether DefaultDeployableFactory
        // actually uses the id, or whether this should be the Tomcat id.
        final Deployable deployable = new DefaultDeployableFactory().createDeployable("jetty", path, deployableType);
        // add deployable
        configuration.addDeployable(deployable);
    }

    /**
     * Constructs the full path to a specific directory from the configuration.
     *
     * @param dir
     *            The directory name.
     * @return Returns a String representation of the full path.
     * @throws ConfigurationException when the directory does not exist.
     */
    private String getContainerDirectory(final String dir) {
        final StringBuilder fullPath = new StringBuilder();
        fullPath.append(this.containerHome);
        fullPath.append(dir);
        final String path = fullPath.toString();
        final File directory = new File(path);
        if (!directory.exists()) {
            // NOTE(review): "excist" is a typo in the runtime message; left
            // untouched here because changing it alters runtime output.
            final String msg = dir + " directory does not excist! : " + path;
            if (LOGGER.isErrorEnabled()) {
                LOGGER.error(msg);
            }
            throw new ConfigurationException(msg);
        }
        return path;
    }

    /** @return the container's shared library directory (lib/), validated to exist. */
    @Override
    public String getSharedLibDirectory() {
        return getContainerDirectory("lib/");
    }

    /** @return the container's configuration directory (conf/), validated to exist. */
    @Override
    public String getConfDirectory() {
        return getContainerDirectory("conf/");
    }

    /** Sets the cargo container id of the Tomcat version to run. */
    public void setTomcatVersion(final String tomcatVersion) {
        this.tomcatVersion = tomcatVersion;
    }

    /** @return the configured AJP port. */
    public Integer getAjpPort() {
        return ajpPort;
    }

    /** Sets the AJP port (required). */
    @Required
    public void setAjpPort(final Integer ajpPort) {
        this.ajpPort = ajpPort;
    }

    /** @return the configured RMI (server control) port. */
    public Integer getRmiPort() {
        return rmiPort;
    }

    /** Sets the RMI (server control) port (required). */
    @Required
    public void setRmiPort(final Integer rmiPort) {
        this.rmiPort = rmiPort;
    }
}
| |
/*
* Copyright (c) 2005-2010, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.esb.mediator.test.aggregate;
import org.apache.axiom.om.OMElement;
import org.apache.axiom.om.util.AXIOMUtil;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import org.wso2.esb.integration.common.utils.ESBIntegrationTest;
import javax.xml.namespace.QName;
import javax.xml.stream.XMLStreamException;
import java.io.IOException;
import java.util.Iterator;
/**
 * Integration tests for the ESB aggregate mediator configured with a high
 * maximum (100) and a low minimum (2) message count. Each test sends a
 * different number of requests through the aggregating proxy and checks how
 * many getQuoteResponse elements come back in the aggregated SOAP body.
 */
public class AggregateWithHighMaxAndLowMinTestCase extends ESBIntegrationTest {

    /** Client used to fire a configurable number of requests at the aggregate proxy. */
    private AggregatedRequestClient aggregatedRequestClient;

    /** Number of requests sent by the currently running test. */
    private int no_of_requests = 0;

    @BeforeClass(alwaysRun = true)
    public void setEnvironment() throws Exception {
        super.init();
        verifyProxyServiceExistence("aggregateMediatorTestProxy1");
        aggregatedRequestClient = new AggregatedRequestClient();
        aggregatedRequestClient.setProxyServiceUrl(getProxyServiceURLHttp("aggregateMediatorTestProxy1"));
        aggregatedRequestClient.setSymbol("IBM");
    }

    /**
     * Sends {@code requests} iterations through the aggregate proxy and returns the
     * number of getQuoteResponse elements in the aggregated SOAP body.
     *
     * Asserts along the way that the raw response is non-null and that every quote
     * element mentions the IBM symbol. Extracted helper: this sequence was
     * previously copy-pasted into every test method.
     *
     * @param requests how many requests to send (also stored in no_of_requests)
     * @return the count of getQuoteResponse children found in the response body
     */
    private int sendAndCountQuoteResponses(int requests) throws IOException, XMLStreamException {
        no_of_requests = requests;
        aggregatedRequestClient.setNoOfIterations(no_of_requests);
        String response = aggregatedRequestClient.getResponse();
        Assert.assertNotNull(response);
        OMElement envelope = AXIOMUtil.stringToOM(response);
        OMElement soapBody = envelope.getFirstElement();
        Iterator iterator = soapBody.getChildrenWithName(new QName("http://services.samples",
                "getQuoteResponse"));
        int responseCount = 0;
        while (iterator.hasNext()) {
            responseCount++;
            OMElement getQuote = (OMElement) iterator.next();
            Assert.assertTrue(getQuote.toString().contains("IBM"));
        }
        return responseCount;
    }

    @Test(groups = {"wso2.esb"}, description = "less number of messages than minimum count")
    public void testLessThanMinimum() throws IOException, XMLStreamException {
        // Below the minimum, the mediator forwards the single message as-is.
        int responseCount = sendAndCountQuoteResponses(1);
        Assert.assertEquals(responseCount, no_of_requests, "GetQuoteResponse Element count mismatched");
    }

    @Test(groups = {"wso2.esb"}, description = "number of messages is equal to the minimum")
    public void testEqualtoMinimum() throws IOException, XMLStreamException {
        int responseCount = sendAndCountQuoteResponses(2);
        Assert.assertEquals(responseCount, no_of_requests, "GetQuoteResponse Element count mismatched");
    }

    @Test(groups = {"wso2.esb"}, description = "number of messages is equal to the maximum", enabled = false)
    public void testEqualtoMaximum() throws IOException, XMLStreamException {
        int responseCount = sendAndCountQuoteResponses(100);
        Assert.assertEquals(responseCount, no_of_requests, "GetQuoteResponse Element count mismatched");
    }

    @Test(groups = {"wso2.esb"}, description = "higher number of messages than minimum count")
    public void testMoreNumberThanMinimum() throws IOException, XMLStreamException {
        int responseCount = sendAndCountQuoteResponses(10);
        // Aggregation may complete anywhere between the minimum (2) and all sent messages.
        Assert.assertTrue(2 <= responseCount && responseCount <= no_of_requests);
    }

    @Test(groups = {"wso2.esb"}, description = "more number of messages than maximum count")
    public void testMoreNumberThanMaximum() throws IOException, XMLStreamException {
        int responseCount = sendAndCountQuoteResponses(110);
        // Above the maximum, the aggregate is capped at 100 messages.
        Assert.assertTrue(2 <= responseCount && responseCount <= 100);
    }

    @Test(groups = {"wso2.esb"}, description = "less number of messages than maximum count")
    public void testLessNumberThanMaximum() throws IOException, XMLStreamException {
        int responseCount = sendAndCountQuoteResponses(60);
        Assert.assertTrue(2 <= responseCount && responseCount <= no_of_requests);
    }

    @AfterClass(alwaysRun = true)
    public void destroy() throws Exception {
        aggregatedRequestClient = null;
        super.cleanup();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.spark;
import static scala.collection.JavaConversions.asJavaCollection;
import static scala.collection.JavaConversions.asJavaIterable;
import static scala.collection.JavaConversions.collectionAsScalaIterable;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import org.apache.spark.SparkContext;
import org.apache.spark.sql.SQLContext;
import org.apache.spark.sql.catalyst.expressions.Attribute;
import org.apache.spark.sql.hive.HiveContext;
import org.apache.zeppelin.annotation.ZeppelinApi;
import org.apache.zeppelin.display.AngularObject;
import org.apache.zeppelin.display.AngularObjectRegistry;
import org.apache.zeppelin.display.AngularObjectWatcher;
import org.apache.zeppelin.display.GUI;
import org.apache.zeppelin.display.Input.ParamOption;
import org.apache.zeppelin.interpreter.InterpreterContext;
import org.apache.zeppelin.interpreter.InterpreterContextRunner;
import org.apache.zeppelin.interpreter.InterpreterException;
import org.apache.zeppelin.spark.dep.SparkDependencyResolver;
import org.apache.zeppelin.resource.Resource;
import org.apache.zeppelin.resource.ResourcePool;
import org.apache.zeppelin.resource.ResourceSet;
import scala.Tuple2;
import scala.Unit;
/**
 * Spark context for zeppelin.
 *
 * Exposes Zeppelin front-end features to Spark interpreter code: dynamic
 * forms (input/select/checkbox), DataFrame rendering, paragraph control,
 * angular variable binding/watching, and the shared resource pool.
 */
public class ZeppelinContext {
  private SparkDependencyResolver dep;
  private InterpreterContext interpreterContext;
  private int maxResult;

  public ZeppelinContext(SparkContext sc, SQLContext sql,
      InterpreterContext interpreterContext,
      SparkDependencyResolver dep,
      int maxResult) {
    this.sc = sc;
    this.sqlContext = sql;
    this.interpreterContext = interpreterContext;
    this.dep = dep;
    this.maxResult = maxResult;
  }

  public SparkContext sc;
  public SQLContext sqlContext;
  public HiveContext hiveContext;
  private GUI gui;

  /** Creates a text input form with an empty default value. */
  @ZeppelinApi
  public Object input(String name) {
    return input(name, "");
  }

  /** Creates a text input form with the given default value. */
  @ZeppelinApi
  public Object input(String name, Object defaultValue) {
    return gui.input(name, defaultValue);
  }

  /** Creates a select form with an empty default selection. */
  @ZeppelinApi
  public Object select(String name, scala.collection.Iterable<Tuple2<Object, String>> options) {
    return select(name, "", options);
  }

  /** Creates a select form with the given default value and (value, label) options. */
  @ZeppelinApi
  public Object select(String name, Object defaultValue,
      scala.collection.Iterable<Tuple2<Object, String>> options) {
    return gui.select(name, defaultValue, tuplesToParamOptions(options));
  }

  /** Creates a checkbox form with every option checked by default. */
  @ZeppelinApi
  public scala.collection.Iterable<Object> checkbox(String name,
      scala.collection.Iterable<Tuple2<Object, String>> options) {
    List<Object> allChecked = new LinkedList<Object>();
    for (Tuple2<Object, String> option : asJavaIterable(options)) {
      allChecked.add(option._1());
    }
    return checkbox(name, collectionAsScalaIterable(allChecked), options);
  }

  /** Creates a checkbox form with an explicit set of pre-checked values. */
  @ZeppelinApi
  public scala.collection.Iterable<Object> checkbox(String name,
      scala.collection.Iterable<Object> defaultChecked,
      scala.collection.Iterable<Tuple2<Object, String>> options) {
    return collectionAsScalaIterable(gui.checkbox(name, asJavaCollection(defaultChecked),
        tuplesToParamOptions(options)));
  }

  /** Converts scala (value, displayValue) tuples into Zeppelin ParamOptions. */
  private ParamOption[] tuplesToParamOptions(
      scala.collection.Iterable<Tuple2<Object, String>> options) {
    int n = options.size();
    ParamOption[] paramOptions = new ParamOption[n];
    Iterator<Tuple2<Object, String>> it = asJavaIterable(options).iterator();
    int i = 0;
    while (it.hasNext()) {
      Tuple2<Object, String> valueAndDisplayValue = it.next();
      paramOptions[i++] = new ParamOption(valueAndDisplayValue._1(), valueAndDisplayValue._2());
    }
    return paramOptions;
  }

  public void setGui(GUI o) {
    this.gui = o;
  }

  private void restartInterpreter() {
  }

  public InterpreterContext getInterpreterContext() {
    return interpreterContext;
  }

  public void setInterpreterContext(InterpreterContext interpreterContext) {
    this.interpreterContext = interpreterContext;
  }

  public void setMaxResult(int maxResult) {
    this.maxResult = maxResult;
  }

  /**
   * show DataFrame or SchemaRDD
   * @param o DataFrame or SchemaRDD object
   */
  @ZeppelinApi
  public void show(Object o) {
    show(o, maxResult);
  }

  /**
   * show DataFrame or SchemaRDD
   * @param o DataFrame or SchemaRDD object
   * @param maxResult maximum number of rows to display
   */
  @ZeppelinApi
  public void show(Object o, int maxResult) {
    // Resolve the DataFrame class reflectively: the class name differs between
    // Spark versions (DataFrame since 1.3, SchemaRDD before that).
    Class cls = null;
    try {
      cls = Class.forName("org.apache.spark.sql.DataFrame");
    } catch (ClassNotFoundException e) {
      // fall through and try the pre-1.3 class name
    }
    if (cls == null) {
      try {
        cls = Class.forName("org.apache.spark.sql.SchemaRDD");
      } catch (ClassNotFoundException e) {
        // neither class is available; handled by the null check below
      }
    }
    if (cls == null) {
      // fixed typo in the error message: "road" -> "load"
      throw new InterpreterException("Can not load DataFrame/SchemaRDD class");
    }
    try {
      if (cls.isInstance(o)) {
        interpreterContext.out.write(showDF(sc, interpreterContext, o, maxResult));
      } else {
        interpreterContext.out.write(o.toString());
      }
    } catch (IOException e) {
      throw new InterpreterException(e);
    }
  }

  /** Convenience wrapper around {@link #showDF(SparkContext, InterpreterContext, Object, int)}. */
  public static String showDF(ZeppelinContext z, Object df) {
    return showDF(z.sc, z.interpreterContext, df, z.maxResult);
  }

  /**
   * Renders a DataFrame/SchemaRDD as a Zeppelin "%table" string. Reflection is
   * used throughout because the concrete Spark classes differ across versions.
   *
   * @param sc spark context, used for job-group bookkeeping
   * @param interpreterContext current paragraph context (for the job-group id)
   * @param df DataFrame or SchemaRDD instance
   * @param maxResult maximum number of rows to render
   * @return a "%table"-prefixed, tab/newline separated representation
   */
  public static String showDF(SparkContext sc,
      InterpreterContext interpreterContext,
      Object df, int maxResult) {
    Object[] rows = null;
    Method take;
    String jobGroup = "zeppelin-" + interpreterContext.getParagraphId();
    sc.setJobGroup(jobGroup, "Zeppelin", false);
    try {
      // take(maxResult + 1): the extra row tells us below whether the result was truncated.
      take = df.getClass().getMethod("take", int.class);
      rows = (Object[]) take.invoke(df, maxResult + 1);
    } catch (NoSuchMethodException | SecurityException | IllegalAccessException
        | IllegalArgumentException | InvocationTargetException | ClassCastException e) {
      sc.clearJobGroup();
      throw new InterpreterException(e);
    }
    List<Attribute> columns = null;
    // get field names
    try {
      // Use reflection because of classname returned by queryExecution changes from
      // Spark <1.5.2 org.apache.spark.sql.SQLContext$QueryExecution
      // Spark 1.6.0> org.apache.spark.sql.hive.HiveContext$QueryExecution
      Object qe = df.getClass().getMethod("queryExecution").invoke(df);
      Object a = qe.getClass().getMethod("analyzed").invoke(qe);
      scala.collection.Seq seq = (scala.collection.Seq) a.getClass().getMethod("output").invoke(a);
      columns = (List<Attribute>) scala.collection.JavaConverters.seqAsJavaListConverter(seq)
          .asJava();
    } catch (NoSuchMethodException | SecurityException | IllegalAccessException
        | IllegalArgumentException | InvocationTargetException e) {
      // clear the job group here too, for consistency with the catch above
      sc.clearJobGroup();
      throw new InterpreterException(e);
    }
    StringBuilder msg = new StringBuilder();
    msg.append("%table ");
    for (Attribute col : columns) {
      msg.append(col.name() + "\t");
    }
    // Drop the trailing tab after the last column header.
    String trim = msg.toString().trim();
    msg = new StringBuilder(trim);
    msg.append("\n");
    // ArrayType, BinaryType, BooleanType, ByteType, DecimalType, DoubleType, DynamicType,
    // FloatType, FractionalType, IntegerType, IntegralType, LongType, MapType, NativeType,
    // NullType, NumericType, ShortType, StringType, StructType
    try {
      for (int r = 0; r < maxResult && r < rows.length; r++) {
        Object row = rows[r];
        Method isNullAt = row.getClass().getMethod("isNullAt", int.class);
        Method apply = row.getClass().getMethod("apply", int.class);
        for (int i = 0; i < columns.size(); i++) {
          if (!(Boolean) isNullAt.invoke(row, i)) {
            msg.append(apply.invoke(row, i).toString());
          } else {
            msg.append("null");
          }
          if (i != columns.size() - 1) {
            msg.append("\t");
          }
        }
        msg.append("\n");
      }
    } catch (NoSuchMethodException | SecurityException | IllegalAccessException
        | IllegalArgumentException | InvocationTargetException e) {
      sc.clearJobGroup();
      throw new InterpreterException(e);
    }
    if (rows.length > maxResult) {
      msg.append("\n<font color=red>Results are limited by " + maxResult + ".</font>");
    }
    sc.clearJobGroup();
    return msg.toString();
  }

  /**
   * Run paragraph by id
   * @param id
   */
  @ZeppelinApi
  public void run(String id) {
    run(id, interpreterContext);
  }

  /**
   * Run paragraph by id
   * @param id
   * @param context
   */
  @ZeppelinApi
  public void run(String id, InterpreterContext context) {
    if (id.equals(context.getParagraphId())) {
      throw new InterpreterException("Can not run current Paragraph");
    }
    for (InterpreterContextRunner r : context.getRunners()) {
      if (id.equals(r.getParagraphId())) {
        r.run();
        return;
      }
    }
    throw new InterpreterException("Paragraph " + id + " not found");
  }

  /**
   * Run paragraph at idx
   * @param idx
   */
  @ZeppelinApi
  public void run(int idx) {
    run(idx, interpreterContext);
  }

  /**
   * Run paragraph at index
   * @param idx index starting from 0
   * @param context interpreter context
   */
  public void run(int idx, InterpreterContext context) {
    if (idx >= context.getRunners().size()) {
      throw new InterpreterException("Index out of bound");
    }
    InterpreterContextRunner runner = context.getRunners().get(idx);
    if (runner.getParagraphId().equals(context.getParagraphId())) {
      throw new InterpreterException("Can not run current Paragraph");
    }
    runner.run();
  }

  @ZeppelinApi
  public void run(List<Object> paragraphIdOrIdx) {
    run(paragraphIdOrIdx, interpreterContext);
  }

  /**
   * Run paragraphs
   * @param paragraphIdOrIdx list of paragraph id or idx
   */
  @ZeppelinApi
  public void run(List<Object> paragraphIdOrIdx, InterpreterContext context) {
    for (Object idOrIdx : paragraphIdOrIdx) {
      if (idOrIdx instanceof String) {
        String id = (String) idOrIdx;
        run(id, context);
      } else if (idOrIdx instanceof Integer) {
        Integer idx = (Integer) idOrIdx;
        run(idx, context);
      } else {
        throw new InterpreterException("Paragraph " + idOrIdx + " not found");
      }
    }
  }

  @ZeppelinApi
  public void runAll() {
    runAll(interpreterContext);
  }

  /**
   * Run all paragraphs. except this.
   */
  @ZeppelinApi
  public void runAll(InterpreterContext context) {
    for (InterpreterContextRunner r : context.getRunners()) {
      if (r.getParagraphId().equals(context.getParagraphId())) {
        // skip itself
        continue;
      }
      r.run();
    }
  }

  /** Returns the ids of all paragraphs in the current note. */
  @ZeppelinApi
  public List<String> listParagraphs() {
    List<String> paragraphs = new LinkedList<String>();
    for (InterpreterContextRunner r : interpreterContext.getRunners()) {
      paragraphs.add(r.getParagraphId());
    }
    return paragraphs;
  }

  /**
   * Resolves an angular object by name: paragraph scope first, then note
   * scope, and finally global scope.
   */
  private AngularObject getAngularObject(String name, InterpreterContext interpreterContext) {
    AngularObjectRegistry registry = interpreterContext.getAngularObjectRegistry();
    String noteId = interpreterContext.getNoteId();
    // try get local object
    AngularObject paragraphAo = registry.get(name, noteId, interpreterContext.getParagraphId());
    AngularObject noteAo = registry.get(name, noteId, null);
    AngularObject ao = paragraphAo != null ? paragraphAo : noteAo;
    if (ao == null) {
      // then global object
      ao = registry.get(name, null, null);
    }
    return ao;
  }

  /**
   * Get angular object. Look up notebook scope first and then global scope
   * @param name variable name
   * @return value
   */
  @ZeppelinApi
  public Object angular(String name) {
    AngularObject ao = getAngularObject(name, interpreterContext);
    if (ao == null) {
      return null;
    } else {
      return ao.get();
    }
  }

  /**
   * Get angular object. Look up global scope
   * @param name variable name
   * @return value
   */
  @Deprecated
  public Object angularGlobal(String name) {
    AngularObjectRegistry registry = interpreterContext.getAngularObjectRegistry();
    AngularObject ao = registry.get(name, null, null);
    if (ao == null) {
      return null;
    } else {
      return ao.get();
    }
  }

  /**
   * Create angular variable in notebook scope and bind with front end Angular display system.
   * If variable exists, it'll be overwritten.
   * @param name name of the variable
   * @param o value
   */
  @ZeppelinApi
  public void angularBind(String name, Object o) {
    angularBind(name, o, interpreterContext.getNoteId());
  }

  /**
   * Create angular variable in global scope and bind with front end Angular display system.
   * If variable exists, it'll be overwritten.
   * @param name name of the variable
   * @param o value
   */
  @Deprecated
  public void angularBindGlobal(String name, Object o) {
    angularBind(name, o, (String) null);
  }

  /**
   * Create angular variable in local scope and bind with front end Angular display system.
   * If variable exists, value will be overwritten and watcher will be added.
   * @param name name of variable
   * @param o value
   * @param watcher watcher of the variable
   */
  @ZeppelinApi
  public void angularBind(String name, Object o, AngularObjectWatcher watcher) {
    angularBind(name, o, interpreterContext.getNoteId(), watcher);
  }

  /**
   * Create angular variable in global scope and bind with front end Angular display system.
   * If variable exists, value will be overwritten and watcher will be added.
   * @param name name of variable
   * @param o value
   * @param watcher watcher of the variable
   */
  @Deprecated
  public void angularBindGlobal(String name, Object o, AngularObjectWatcher watcher) {
    angularBind(name, o, null, watcher);
  }

  /**
   * Add watcher into angular variable (local scope)
   * @param name name of the variable
   * @param watcher watcher
   */
  @ZeppelinApi
  public void angularWatch(String name, AngularObjectWatcher watcher) {
    angularWatch(name, interpreterContext.getNoteId(), watcher);
  }

  /**
   * Add watcher into angular variable (global scope)
   * @param name name of the variable
   * @param watcher watcher
   */
  @Deprecated
  public void angularWatchGlobal(String name, AngularObjectWatcher watcher) {
    angularWatch(name, null, watcher);
  }

  @ZeppelinApi
  public void angularWatch(String name,
      final scala.Function2<Object, Object, Unit> func) {
    angularWatch(name, interpreterContext.getNoteId(), func);
  }

  @Deprecated
  public void angularWatchGlobal(String name,
      final scala.Function2<Object, Object, Unit> func) {
    angularWatch(name, null, func);
  }

  @ZeppelinApi
  public void angularWatch(
      String name,
      final scala.Function3<Object, Object, InterpreterContext, Unit> func) {
    angularWatch(name, interpreterContext.getNoteId(), func);
  }

  @Deprecated
  public void angularWatchGlobal(
      String name,
      final scala.Function3<Object, Object, InterpreterContext, Unit> func) {
    angularWatch(name, null, func);
  }

  /**
   * Remove watcher from angular variable (local)
   * @param name
   * @param watcher
   */
  @ZeppelinApi
  public void angularUnwatch(String name, AngularObjectWatcher watcher) {
    angularUnwatch(name, interpreterContext.getNoteId(), watcher);
  }

  /**
   * Remove watcher from angular variable (global)
   * @param name
   * @param watcher
   */
  @Deprecated
  public void angularUnwatchGlobal(String name, AngularObjectWatcher watcher) {
    angularUnwatch(name, null, watcher);
  }

  /**
   * Remove all watchers for the angular variable (local)
   * @param name
   */
  @ZeppelinApi
  public void angularUnwatch(String name) {
    angularUnwatch(name, interpreterContext.getNoteId());
  }

  /**
   * Remove all watchers for the angular variable (global)
   * @param name
   */
  @Deprecated
  public void angularUnwatchGlobal(String name) {
    angularUnwatch(name, (String) null);
  }

  /**
   * Remove angular variable and all the watchers.
   * @param name
   */
  @ZeppelinApi
  public void angularUnbind(String name) {
    String noteId = interpreterContext.getNoteId();
    angularUnbind(name, noteId);
  }

  /**
   * Remove angular variable and all the watchers.
   * @param name
   */
  @Deprecated
  public void angularUnbindGlobal(String name) {
    angularUnbind(name, null);
  }

  /**
   * Create angular variable in notebook scope and bind with front end Angular display system.
   * If variable exists, it'll be overwritten.
   * @param name name of the variable
   * @param o value
   */
  private void angularBind(String name, Object o, String noteId) {
    AngularObjectRegistry registry = interpreterContext.getAngularObjectRegistry();
    if (registry.get(name, noteId, null) == null) {
      registry.add(name, o, noteId, null);
    } else {
      registry.get(name, noteId, null).set(o);
    }
  }

  /**
   * Create angular variable in notebook scope and bind with front end Angular display
   * system.
   * If variable exists, value will be overwritten and watcher will be added.
   * @param name name of variable
   * @param o value
   * @param watcher watcher of the variable
   */
  private void angularBind(String name, Object o, String noteId, AngularObjectWatcher watcher) {
    AngularObjectRegistry registry = interpreterContext.getAngularObjectRegistry();
    if (registry.get(name, noteId, null) == null) {
      registry.add(name, o, noteId, null);
    } else {
      registry.get(name, noteId, null).set(o);
    }
    angularWatch(name, watcher);
  }

  /**
   * Add watcher into angular binding variable
   * @param name name of the variable
   * @param watcher watcher
   */
  private void angularWatch(String name, String noteId, AngularObjectWatcher watcher) {
    AngularObjectRegistry registry = interpreterContext.getAngularObjectRegistry();
    if (registry.get(name, noteId, null) != null) {
      registry.get(name, noteId, null).addWatcher(watcher);
    }
  }

  private void angularWatch(String name, String noteId,
      final scala.Function2<Object, Object, Unit> func) {
    AngularObjectWatcher w = new AngularObjectWatcher(getInterpreterContext()) {
      @Override
      public void watch(Object oldObject, Object newObject,
          InterpreterContext context) {
        // BUGFIX: previously passed (newObject, newObject), so watchers never
        // saw the old value. The contract is (oldValue, newValue).
        func.apply(oldObject, newObject);
      }
    };
    angularWatch(name, noteId, w);
  }

  private void angularWatch(
      String name,
      String noteId,
      final scala.Function3<Object, Object, InterpreterContext, Unit> func) {
    AngularObjectWatcher w = new AngularObjectWatcher(getInterpreterContext()) {
      @Override
      public void watch(Object oldObject, Object newObject,
          InterpreterContext context) {
        func.apply(oldObject, newObject, context);
      }
    };
    angularWatch(name, noteId, w);
  }

  /**
   * Remove watcher
   * @param name
   * @param watcher
   */
  private void angularUnwatch(String name, String noteId, AngularObjectWatcher watcher) {
    AngularObjectRegistry registry = interpreterContext.getAngularObjectRegistry();
    if (registry.get(name, noteId, null) != null) {
      registry.get(name, noteId, null).removeWatcher(watcher);
    }
  }

  /**
   * Remove all watchers for the angular variable
   * @param name
   */
  private void angularUnwatch(String name, String noteId) {
    AngularObjectRegistry registry = interpreterContext.getAngularObjectRegistry();
    if (registry.get(name, noteId, null) != null) {
      registry.get(name, noteId, null).clearAllWatchers();
    }
  }

  /**
   * Remove angular variable and all the watchers.
   * @param name
   */
  private void angularUnbind(String name, String noteId) {
    AngularObjectRegistry registry = interpreterContext.getAngularObjectRegistry();
    registry.remove(name, noteId, null);
  }

  /**
   * Add object into resource pool
   * @param name
   * @param value
   */
  @ZeppelinApi
  public void put(String name, Object value) {
    ResourcePool resourcePool = interpreterContext.getResourcePool();
    resourcePool.put(name, value);
  }

  /**
   * Get object from resource pool
   * Search local process first and then the other processes
   * @param name
   * @return null if resource not found
   */
  @ZeppelinApi
  public Object get(String name) {
    ResourcePool resourcePool = interpreterContext.getResourcePool();
    Resource resource = resourcePool.get(name);
    if (resource != null) {
      return resource.get();
    } else {
      return null;
    }
  }

  /**
   * Remove object from resourcePool
   * @param name
   */
  @ZeppelinApi
  public void remove(String name) {
    ResourcePool resourcePool = interpreterContext.getResourcePool();
    resourcePool.remove(name);
  }

  /**
   * Check if resource pool has the object
   * @param name
   * @return
   */
  @ZeppelinApi
  public boolean containsKey(String name) {
    ResourcePool resourcePool = interpreterContext.getResourcePool();
    Resource resource = resourcePool.get(name);
    return resource != null;
  }

  /**
   * Get all resources
   */
  @ZeppelinApi
  public ResourceSet getAll() {
    ResourcePool resourcePool = interpreterContext.getResourcePool();
    return resourcePool.getAll();
  }
}
| |
package com.androidsdk.snaphy.snaphyandroidsdk.models;
import org.json.JSONObject;
import org.json.JSONArray;
import android.util.Log;
import java.util.List;
import com.androidsdk.snaphy.snaphyandroidsdk.adapter.SnaphyRestAdapter;
import com.strongloop.android.remoting.adapters.Adapter;
import android.content.Context;
/*
Replacing with custom Snaphy callback methods
import com.strongloop.android.loopback.callbacks.ListCallback;
import com.strongloop.android.loopback.callbacks.ObjectCallback;
import com.strongloop.android.loopback.callbacks.VoidCallback;
*/
import com.androidsdk.snaphy.snaphyandroidsdk.callbacks.ObjectCallback;
import com.androidsdk.snaphy.snaphyandroidsdk.callbacks.DataListCallback;
import com.androidsdk.snaphy.snaphyandroidsdk.callbacks.VoidCallback;
import com.androidsdk.snaphy.snaphyandroidsdk.list.DataList;
//Import self repository..
import com.androidsdk.snaphy.snaphyandroidsdk.repository.SnaphyAclRepository;
//Now import repository of related models..
import com.androidsdk.snaphy.snaphyandroidsdk.repository.SnaphyAclPropRepository;
import com.androidsdk.snaphy.snaphyandroidsdk.repository.SnaphyAclRelationRepository;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
public class SnaphyAcl extends Model {
//For converting all model values to hashMap
private transient Map<String, Object> hashMap = new HashMap<>();

/**
 * Returns this model's property values as a map (the setters below keep it in
 * sync). Fix: the original condition was inverted — it stored a null "id"
 * entry for unsaved models and omitted the id for saved ones. The id is now
 * added only when it actually exists.
 */
public Map<String, ? extends Object> convertMap(){
    if(that.getId() != null){
        //Reflect the current id into the serialisation map.
        hashMap.put("id", that.getId());
    }
    return hashMap;
}
//Self reference used by the generated relation/helper methods below.
private SnaphyAcl that ;
public SnaphyAcl (){
//Capture "this" so anonymous inner callbacks can reach the enclosing model.
that = this;
}
//Server-side "added" attribute of this model.
private String added;

/** Returns the {@code added} attribute. */
public String getAdded(){
    return this.added;
}

/** Updates the {@code added} attribute and mirrors it into the convertMap() map. */
public void setAdded(String added){
    //Keep the serialisation map in sync with the field.
    hashMap.put("added", added);
    this.added = added;
}
//Server-side "updated" attribute of this model.
private String updated;

/** Returns the {@code updated} attribute. */
public String getUpdated(){
    return this.updated;
}

/** Updates the {@code updated} attribute and mirrors it into the convertMap() map. */
public void setUpdated(String updated){
    //Keep the serialisation map in sync with the field.
    hashMap.put("updated", updated);
    this.updated = updated;
}
//Name of the model this ACL entry applies to.
private String model;

/** Returns the {@code model} attribute. */
public String getModel(){
    return this.model;
}

/** Updates the {@code model} attribute and mirrors it into the convertMap() map. */
public void setModel(String model){
    //Keep the serialisation map in sync with the field.
    hashMap.put("model", model);
    this.model = model;
}
//ACL "read" permission value.
private String read;

/** Returns the {@code read} attribute. */
public String getRead(){
    return this.read;
}

/** Updates the {@code read} attribute and mirrors it into the convertMap() map. */
public void setRead(String read){
    //Keep the serialisation map in sync with the field.
    hashMap.put("read", read);
    this.read = read;
}
//ACL "create" permission value.
private String create;

/** Returns the {@code create} attribute. */
public String getCreate(){
    return this.create;
}

/** Updates the {@code create} attribute and mirrors it into the convertMap() map. */
public void setCreate(String create){
    //Keep the serialisation map in sync with the field.
    hashMap.put("create", create);
    this.create = create;
}
//ACL "edit" permission value.
private String edit;

/** Returns the {@code edit} attribute. */
public String getEdit(){
    return this.edit;
}

/** Updates the {@code edit} attribute and mirrors it into the convertMap() map. */
public void setEdit(String edit){
    //Keep the serialisation map in sync with the field.
    hashMap.put("edit", edit);
    this.edit = edit;
}
//ACL "delete" permission value.
private String delete;

/** Returns the {@code delete} attribute. */
public String getDelete(){
    return this.delete;
}

/** Updates the {@code delete} attribute and mirrors it into the convertMap() map. */
public void setDelete(String delete){
    //Keep the serialisation map in sync with the field.
    hashMap.put("delete", delete);
    this.delete = delete;
}
//Role this ACL entry is granted to.
private String role;

/** Returns the {@code role} attribute. */
public String getRole(){
    return this.role;
}

/** Updates the {@code role} attribute and mirrors it into the convertMap() map. */
public void setRole(String role){
    //Keep the serialisation map in sync with the field.
    hashMap.put("role", role);
    this.role = role;
}
//------------------------------------Database Method---------------------------------------------------
//Persists this model: first into the local database, then to the remote server.
public void save(final com.strongloop.android.loopback.callbacks.VoidCallback callback){
//Save to local database first..
save__db();
//Then save remotely; the parent implementation performs the REST call and
//invokes the supplied callback.
super.save(callback);
}
/**
 * Deletes this model locally (when local storage is enabled) and then remotely.
 *
 * Fix: the original called getId().toString() before checking for null, which
 * throws a NullPointerException for unsaved models, and its subsequent
 * "id != null" check was dead code. The id is now checked first.
 */
public void destroy(final com.strongloop.android.loopback.callbacks.VoidCallback callback){
    SnaphyAclRepository lowercaseFirstLetterRepository = (SnaphyAclRepository) getRepository();
    if(lowercaseFirstLetterRepository.isSTORE_LOCALLY()){
        //Delete from the local database, but only when an id actually exists.
        if(getId() != null && lowercaseFirstLetterRepository.getDb() != null){
            lowercaseFirstLetterRepository.getDb().delete__db(getId().toString());
        }
    }
    //Also delete on the remote server (parent class performs the REST call)..
    super.destroy(callback);
}
/**
 * Upserts this model into the local database under the given id, provided the
 * repository is configured for local storage and a database handle exists.
 */
public void save__db(String id){
    SnaphyAclRepository repo = (SnaphyAclRepository) getRepository();
    //Same short-circuit order as before: local-storage flag, id, then db handle.
    if(repo.isSTORE_LOCALLY() && id != null && repo.getDb() != null){
        repo.getDb().upsert__db(id, this);
    }
}
/**
 * Removes this model from the local database when local storage is enabled,
 * the model has an id, and a database handle is available.
 */
public void delete__db(){
    SnaphyAclRepository repo = (SnaphyAclRepository) getRepository();
    if(!repo.isSTORE_LOCALLY()){
        return;
    }
    if(getId() == null || repo.getDb() == null){
        return;
    }
    repo.getDb().delete__db(getId().toString());
}
/**
 * Convenience overload: persists this model locally using its own id.
 * Silently does nothing for unsaved models (null id).
 */
public void save__db(){
    Object currentId = getId();
    if(currentId != null){
        save__db(currentId.toString());
    }
}
//-----------------------------------END Database Methods------------------------------------------------
//Now adding relations between related models
//Define hasMany relation method here..
//Cached hasMany relation list; transient so it is not serialised with the model.
private transient DataList<SnaphyAclProp> snaphyAclProps ;
//Returns the related SnaphyAclProp list, refreshing it from the local database
//(by foreign key "snaphyAclId") when this model has an id and a db handle exists.
//Any failure is swallowed and the previously cached list (possibly null) is returned.
public DataList< SnaphyAclProp > getSnaphyAclProps() {
//Check for pure case of hasMany
//TODO: Modify foreign key name..
try{
//NOTE(review): this casts getRepository() — this model's own repository — to
//SnaphyAclPropRepository. If getRepository() returns a SnaphyAclRepository the
//cast throws ClassCastException, which the catch below silently swallows, so the
//local fetch would never run. Verify the intended repository here.
SnaphyAclPropRepository snaphyAclPropRepository = (SnaphyAclPropRepository) getRepository();
if(that.getId() != null && snaphyAclPropRepository.getDb() != null){
//Fetch locally from db
//snaphyAclProps = getSnaphyAclProps__db(restAdapter);
// Getting single cont
snaphyAclProps = snaphyAclPropRepository.getDb().getAll__db("snaphyAclId", that.getId().toString());
//lowercaseFirstLetter(modelName)
}
}catch(Exception e){
//Ignore - best-effort local refresh only.
}
return snaphyAclProps;
}
/**
 * Stores the hasMany relation list. The server may deliver the related
 * objects as raw HashMaps (include-statement responses); in that case they
 * are converted to typed models via {@link #setSnaphyAclProps1}. Otherwise
 * the typed list is stored directly and each element is persisted to the
 * local database on a best-effort basis.
 */
public void setSnaphyAclProps(DataList<SnaphyAclProp> snaphyAclProps) {
    DataList<HashMap<String, Object>> rawMaps = new DataList<>();
    boolean containsRawMaps = false;
    for(Object element: snaphyAclProps){
        //Exact-class check (not instanceof), matching the server deserialiser output.
        if(element.getClass().equals(HashMap.class)){
            containsRawMaps = true;
            rawMaps.add((HashMap<String, Object>) element);
        }
    }
    if(containsRawMaps){
        setSnaphyAclProps1(rawMaps);
    }else{
        this.snaphyAclProps = snaphyAclProps;
        //TODO: Warning move this to new thread
        for(SnaphyAclProp item: snaphyAclProps){
            try{
                item.save__db();
            } catch (NoSuchMethodError e) {
                //Local persistence unavailable for this model; skip silently.
            }
        }
    }
}
/* //Adding related model automatically in case of include statement from server.. Adding 1 for removing same name error..
public void setSnaphyAclProps1(List<Map<String, Object>> snaphyAclProps) {
//First create a dummy Repo class object for ..
SnaphyAclPropRepository snaphyAclPropsRepository = new SnaphyAclPropRepository();
List<SnaphyAclProp> result = new ArrayList<>();
for (Map<String, Object> obj : snaphyAclProps) {
//Also add relation to child type for two way communication..
SnaphyAclProp obj1 = snaphyAclPropsRepository.createObject(obj);
result.add(obj1);
}
setSnaphyAclProps(result);
}
*/
//Adding related model automatically in case of include statement from server.. Adding 1 for removing same name error..
/**
 * Converts a list of raw server maps into typed SnaphyAclProp models and
 * delegates to {@link #setSnaphyAclProps(DataList)}.
 */
public void setSnaphyAclProps1(DataList<HashMap<String, Object>> snaphyAclProps) {
    //A throwaway repository instance used purely as an object factory.
    SnaphyAclPropRepository factory = new SnaphyAclPropRepository();
    DataList<SnaphyAclProp> converted = new DataList<>();
    for (HashMap<String, Object> raw : snaphyAclProps) {
        converted.add(factory.createObject(raw));
    }
    setSnaphyAclProps(converted);
}
//Adding relation method..
//Add a dummy class Name object to separate data..
//NOTE: the dummyClassInstance parameter exists only to give this overload a
//distinct erased signature; its value is never used.
public void addRelation(DataList<SnaphyAclProp> snaphyAclProps, SnaphyAclProp dummyClassInstance) {
that.setSnaphyAclProps(snaphyAclProps);
}
//Adding relation method..
/**
 * Appends a single related SnaphyAclProp to the hasMany list, creating the
 * list first when none exists yet.
 *
 * Fix: the original relied on a thrown exception (NPE on a null list) as
 * control flow to detect the missing list; an explicit null check is used
 * instead, and the best-effort local save is scoped to its own handler.
 */
public void addRelation(SnaphyAclProp snaphyAclProps) {
    try{
        //Save to database (best effort)..
        snaphyAclProps.save__db();
    }catch (NoSuchMethodError e) {
        // ignore - local persistence unavailable
    }
    DataList<SnaphyAclProp> existing = that.getSnaphyAclProps();
    if(existing != null){
        existing.add(snaphyAclProps);
    }else{
        //No list yet: create one, add the item, and store it via the setter.
        DataList<SnaphyAclProp> fresh = new DataList();
        fresh.add(snaphyAclProps);
        that.setSnaphyAclProps(fresh);
    }
}
//Implement logic for pure hasMany methods here....
//Now add instance methods to fetch the related belongsTo Model..
/**
 * Fetches a single related SnaphyAclProp by its foreign key from the server,
 * attaches it to this model's relation list, and relays the result to the
 * supplied callback.
 *
 * Fix: the original catch block only logged failures, so the caller's
 * onError/onFinally hooks were never invoked when post-processing threw;
 * the callback chain is now always completed.
 */
public void findById__snaphyAclProps( String fk, SnaphyRestAdapter restAdapter, final ObjectCallback<SnaphyAclProp> callback) {
    //Call the onBefore callback method..
    callback.onBefore();
    //Create the repository that performs the actual REST call.
    final SnaphyAclRepository snaphyAclRepo = restAdapter.createRepository(SnaphyAclRepository.class);
    snaphyAclRepo.findById__snaphyAclProps( (String)that.getId(), fk, new ObjectCallback<SnaphyAclProp> (){
        @Override
        public void onSuccess(SnaphyAclProp object) {
            try{
                if(object != null){
                    //Attach the fetched object to this model's relation list.
                    //Two-way relation (object.addRelation(that)) intentionally
                    //disabled to avoid cyclic references.
                    addRelation(object);
                }
                //Forward the (possibly null) result, then finish the chain.
                callback.onSuccess(object);
                callback.onFinally();
            }catch(Exception e){
                Log.e("Snaphy", e.toString());
                //Complete the callback chain instead of silently swallowing.
                callback.onError(e);
                callback.onFinally();
            }
        }
        @Override
        public void onError(Throwable t) {
            //Now calling the callback
            callback.onError(t);
            //Calling the finally..callback
            callback.onFinally();
        }
    });
} //method def ends here.
//Write the method here..
/**
 * Deletes the related SnaphyAclProp identified by {@code fk} on the server,
 * relaying success/failure to the supplied callback.
 */
public void destroyById__snaphyAclProps( String fk, SnaphyRestAdapter restAdapter, final VoidCallback callback) {
    //Signal that the request is about to start.
    callback.onBefore();
    //Repository that performs the actual REST call.
    final SnaphyAclRepository repository = restAdapter.createRepository(SnaphyAclRepository.class);
    repository.destroyById__snaphyAclProps( (String)that.getId(), fk, new VoidCallback (){
        @Override
        public void onSuccess() {
            callback.onSuccess();
            //Always finish the callback chain.
            callback.onFinally();
        }
        @Override
        public void onError(Throwable t) {
            callback.onError(t);
            //Always finish the callback chain.
            callback.onFinally();
        }
    });
} //method def ends here.
//Write the method here..
//Updates the related SnaphyAclProp identified by fk on the server with the given
//data, attaches the updated object to this model's relation list, and relays the
//result to the supplied callback.
public void updateById__snaphyAclProps( String fk, SnaphyAclProp data, SnaphyRestAdapter restAdapter, final ObjectCallback<SnaphyAclProp> callback) {
//Call the onBefore callback method..
callback.onBefore();
//Create the repository that performs the actual REST call..
final SnaphyAclRepository snaphyAclRepo = restAdapter.createRepository(SnaphyAclRepository.class);
snaphyAclRepo.updateById__snaphyAclProps( (String)that.getId(), fk, data.convertMap(), new ObjectCallback<SnaphyAclProp> (){
@Override
public void onSuccess(SnaphyAclProp object) {
try{
if(object != null){
//now add relation to this recipe.
addRelation(object);
//Also add relation to child type for two way communication..Removing two way communication for cyclic error
//object.addRelation(that);
callback.onSuccess(object);
//Calling the finally..callback
callback.onFinally();
}else{
callback.onSuccess(null);
//Calling the finally..callback
callback.onFinally();
}
}catch(Exception e){
//NOTE(review): a failure here is only logged — callback.onError/onFinally are
//never invoked, so callers may hang waiting for completion. Verify intent.
Log.e("Snaphy", e.toString());
}
}
@Override
public void onError(Throwable t) {
//Now calling the callback
callback.onError(t);
//Calling the finally..callback
callback.onFinally();
}
});
} //method def ends here.
//Write the method here..
//Fetches the related SnaphyAclProp list from the server using the given filter,
//stores it as this model's relation list, and relays the result to the callback.
public void get__snaphyAclProps( Map<String, ? extends Object> filter, SnaphyRestAdapter restAdapter, final DataListCallback<SnaphyAclProp> callback) {
//Call the onBefore callback method..
callback.onBefore();
//Create the repository that performs the actual REST call..
final SnaphyAclRepository snaphyAclRepo = restAdapter.createRepository(SnaphyAclRepository.class);
snaphyAclRepo.get__snaphyAclProps( (String)that.getId(), filter, new DataListCallback<SnaphyAclProp> (){
@Override
public void onSuccess(DataList<SnaphyAclProp> object) {
try{
if(object != null){
//now add relation to this recipe.
//The extra instance only disambiguates the addRelation overload; it is unused.
SnaphyAclProp obj = new SnaphyAclProp();
addRelation(object, obj);
//Disabling two way communication for cyclic error..
/*for (SnaphyAclProp obj : object) {
//Also add relation to child type for two way communication..
obj.addRelation(that);
}*/
callback.onSuccess(object);
//Calling the finally..callback
callback.onFinally();
}else{
callback.onSuccess(null);
//Calling the finally..callback
callback.onFinally();
}
}catch(Exception e){
//NOTE(review): a failure here is only logged — callback.onError/onFinally are
//never invoked, so callers may hang waiting for completion. Verify intent.
Log.e("Snaphy", e.toString());
}
}
@Override
public void onError(Throwable t) {
//Now calling the callback
callback.onError(t);
//Calling the finally..callback
callback.onFinally();
}
});
} //method def ends here.
//Write the method here..
//Creates a new related SnaphyAclProp on the server from the given data, attaches
//the created object to this model's relation list, and relays the result to the
//supplied callback.
public void create__snaphyAclProps( SnaphyAclProp data, SnaphyRestAdapter restAdapter, final ObjectCallback<SnaphyAclProp> callback) {
//Call the onBefore callback method..
callback.onBefore();
//Create the repository that performs the actual REST call..
final SnaphyAclRepository snaphyAclRepo = restAdapter.createRepository(SnaphyAclRepository.class);
snaphyAclRepo.create__snaphyAclProps( (String)that.getId(), data.convertMap(), new ObjectCallback<SnaphyAclProp> (){
@Override
public void onSuccess(SnaphyAclProp object) {
try{
if(object != null){
//now add relation to this recipe.
addRelation(object);
//Also add relation to child type for two way communication..Removing two way communication for cyclic error
//object.addRelation(that);
callback.onSuccess(object);
//Calling the finally..callback
callback.onFinally();
}else{
callback.onSuccess(null);
//Calling the finally..callback
callback.onFinally();
}
}catch(Exception e){
//NOTE(review): a failure here is only logged — callback.onError/onFinally are
//never invoked, so callers may hang waiting for completion. Verify intent.
Log.e("Snaphy", e.toString());
}
}
@Override
public void onError(Throwable t) {
//Now calling the callback
callback.onError(t);
//Calling the finally..callback
callback.onFinally();
}
});
} //method def ends here.
//Write the method here..
public void delete__snaphyAclProps( SnaphyRestAdapter restAdapter, final VoidCallback callback) {
//Call the onBefore callback method..
callback.onBefore();
//Define methods here..
final SnaphyAclRepository snaphyAclRepo = restAdapter.createRepository(SnaphyAclRepository.class);
snaphyAclRepo.delete__snaphyAclProps( (String)that.getId(), new VoidCallback (){
@Override
public void onSuccess() {
callback.onSuccess();
//Calling the finally..callback
callback.onFinally();
}
@Override
public void onError(Throwable t) {
//Now calling the callback
callback.onError(t);
//Calling the finally..callback
callback.onFinally();
}
});
} //method def ends here.
//Write the method here..
public void count__snaphyAclProps( Map<String, ? extends Object> where, SnaphyRestAdapter restAdapter, final ObjectCallback<JSONObject> callback) {
//Call the onBefore callback method..
callback.onBefore();
//Define methods here..
final SnaphyAclRepository snaphyAclRepo = restAdapter.createRepository(SnaphyAclRepository.class);
snaphyAclRepo.count__snaphyAclProps( (String)that.getId(), where, new ObjectCallback<JSONObject>(){
@Override
public void onSuccess(JSONObject object) {
try{
callback.onSuccess(object);
//Calling the finally..callback
callback.onFinally();
}catch(Exception e){
Log.e("Snaphy", e.toString());
}
}
@Override
public void onError(Throwable t) {
//Now calling the callback
callback.onError(t);
//Calling the finally..callback
callback.onFinally();
}
});
} //method def ends here.
//Define hasMany, hasManyThrough method here..
//Define hasMany relation method here..
private transient DataList<SnaphyAclRelation> snaphyAclRelations ;
public DataList< SnaphyAclRelation > getSnaphyAclRelations() {
//Check for pure case of hasMany
//TODO: Modify foreign key name..
try{
SnaphyAclRelationRepository snaphyAclRelationRepository = (SnaphyAclRelationRepository) getRepository();
if(that.getId() != null && snaphyAclRelationRepository.getDb() != null){
//Fetch locally from db
//snaphyAclRelations = getSnaphyAclRelations__db(restAdapter);
// Getting single cont
snaphyAclRelations = snaphyAclRelationRepository.getDb().getAll__db("snaphyAclId", that.getId().toString());
//lowercaseFirstLetter(modelName)
}
}catch(Exception e){
//Ignore
}
return snaphyAclRelations;
}
public void setSnaphyAclRelations(DataList<SnaphyAclRelation> snaphyAclRelations) {
boolean hashType = false;
DataList<HashMap<String, Object>> hashMaps = new DataList<>();
for(Object o: snaphyAclRelations){
if(o.getClass().equals(HashMap.class)){
hashType = true;
HashMap<String, Object> dataObj = (HashMap<String, Object>)o;
hashMaps.add(dataObj);
}
}
if(hashType){
setSnaphyAclRelations1(hashMaps);
}else{
this.snaphyAclRelations = snaphyAclRelations;
//TODO: Warning move this to new thread
for(SnaphyAclRelation data: snaphyAclRelations){
try{
data.save__db();
} catch (NoSuchMethodError e) {
// ignore
}
}
}
}
/* //Adding related model automatically in case of include statement from server.. Adding 1 for removing same name error..
public void setSnaphyAclRelations1(List<Map<String, Object>> snaphyAclRelations) {
//First create a dummy Repo class object for ..
SnaphyAclRelationRepository snaphyAclRelationsRepository = new SnaphyAclRelationRepository();
List<SnaphyAclRelation> result = new ArrayList<>();
for (Map<String, Object> obj : snaphyAclRelations) {
//Also add relation to child type for two way communication..
SnaphyAclRelation obj1 = snaphyAclRelationsRepository.createObject(obj);
result.add(obj1);
}
setSnaphyAclRelations(result);
}
*/
//Adding related model automatically in case of include statement from server.. Adding 1 for removing same name error..
public void setSnaphyAclRelations1(DataList<HashMap<String, Object>> snaphyAclRelations) {
//First create a dummy Repo class object for ..
SnaphyAclRelationRepository snaphyAclRelationsRepository = new SnaphyAclRelationRepository();
DataList<SnaphyAclRelation> result = new DataList<>();
for (HashMap<String, Object> obj : snaphyAclRelations) {
//Also add relation to child type for two way communication..
SnaphyAclRelation obj1 = snaphyAclRelationsRepository.createObject(obj);
result.add(obj1);
}
setSnaphyAclRelations(result);
}
//Adding relation method..
//Add a dummy class Name object to seperate data..
    /**
     * Replaces this instance's SnaphyAclRelation list with the given one.
     *
     * @param snaphyAclRelations new relation list to store on this instance.
     * @param dummyClassInstance unused; presumably only disambiguates this
     *        overload from other addRelation(DataList, X) variants after
     *        generic erasure — TODO confirm against the generator.
     */
    public void addRelation(DataList<SnaphyAclRelation> snaphyAclRelations, SnaphyAclRelation dummyClassInstance) {
        that.setSnaphyAclRelations(snaphyAclRelations);
    }
//Adding relation method..
//This will add a new data to the list relation object..
public void addRelation(SnaphyAclRelation snaphyAclRelations) {
try{
try{
//Save to database..
snaphyAclRelations.save__db();
}catch (NoSuchMethodError e) {
// ignore
}
that.getSnaphyAclRelations().add(snaphyAclRelations);
}catch(Exception e){
DataList< SnaphyAclRelation> snaphyAclRelations1 = new DataList();
//Now add this item to list..
snaphyAclRelations1.add(snaphyAclRelations);
//Now set data....
that.setSnaphyAclRelations(snaphyAclRelations1);
}
}
//Implement logic for pure hasMany methods here....
//Now add instance methods to fetch the related belongsTo Model..
//Write the method here..
public void findById__snaphyAclRelations( String fk, SnaphyRestAdapter restAdapter, final ObjectCallback<SnaphyAclRelation> callback) {
//Call the onBefore callback method..
callback.onBefore();
//Define methods here..
final SnaphyAclRepository snaphyAclRepo = restAdapter.createRepository(SnaphyAclRepository.class);
snaphyAclRepo.findById__snaphyAclRelations( (String)that.getId(), fk, new ObjectCallback<SnaphyAclRelation> (){
@Override
public void onSuccess(SnaphyAclRelation object) {
try{
if(object != null){
//now add relation to this recipe.
addRelation(object);
//Also add relation to child type for two way communication..Removing two way communication for cyclic error
//object.addRelation(that);
callback.onSuccess(object);
//Calling the finally..callback
callback.onFinally();
}else{
callback.onSuccess(null);
//Calling the finally..callback
callback.onFinally();
}
}catch(Exception e){
Log.e("Snaphy", e.toString());
}
}
@Override
public void onError(Throwable t) {
//Now calling the callback
callback.onError(t);
//Calling the finally..callback
callback.onFinally();
}
});
} //method def ends here.
//Write the method here..
public void destroyById__snaphyAclRelations( String fk, SnaphyRestAdapter restAdapter, final VoidCallback callback) {
//Call the onBefore callback method..
callback.onBefore();
//Define methods here..
final SnaphyAclRepository snaphyAclRepo = restAdapter.createRepository(SnaphyAclRepository.class);
snaphyAclRepo.destroyById__snaphyAclRelations( (String)that.getId(), fk, new VoidCallback (){
@Override
public void onSuccess() {
callback.onSuccess();
//Calling the finally..callback
callback.onFinally();
}
@Override
public void onError(Throwable t) {
//Now calling the callback
callback.onError(t);
//Calling the finally..callback
callback.onFinally();
}
});
} //method def ends here.
//Write the method here..
public void updateById__snaphyAclRelations( String fk, SnaphyAclRelation data, SnaphyRestAdapter restAdapter, final ObjectCallback<SnaphyAclRelation> callback) {
//Call the onBefore callback method..
callback.onBefore();
//Define methods here..
final SnaphyAclRepository snaphyAclRepo = restAdapter.createRepository(SnaphyAclRepository.class);
snaphyAclRepo.updateById__snaphyAclRelations( (String)that.getId(), fk, data.convertMap(), new ObjectCallback<SnaphyAclRelation> (){
@Override
public void onSuccess(SnaphyAclRelation object) {
try{
if(object != null){
//now add relation to this recipe.
addRelation(object);
//Also add relation to child type for two way communication..Removing two way communication for cyclic error
//object.addRelation(that);
callback.onSuccess(object);
//Calling the finally..callback
callback.onFinally();
}else{
callback.onSuccess(null);
//Calling the finally..callback
callback.onFinally();
}
}catch(Exception e){
Log.e("Snaphy", e.toString());
}
}
@Override
public void onError(Throwable t) {
//Now calling the callback
callback.onError(t);
//Calling the finally..callback
callback.onFinally();
}
});
} //method def ends here.
//Write the method here..
public void get__snaphyAclRelations( Map<String, ? extends Object> filter, SnaphyRestAdapter restAdapter, final DataListCallback<SnaphyAclRelation> callback) {
//Call the onBefore callback method..
callback.onBefore();
//Define methods here..
final SnaphyAclRepository snaphyAclRepo = restAdapter.createRepository(SnaphyAclRepository.class);
snaphyAclRepo.get__snaphyAclRelations( (String)that.getId(), filter, new DataListCallback<SnaphyAclRelation> (){
@Override
public void onSuccess(DataList<SnaphyAclRelation> object) {
try{
if(object != null){
//now add relation to this recipe.
SnaphyAclRelation obj = new SnaphyAclRelation();
addRelation(object, obj);
//Disabling two way communication for cyclic error..
/*for (SnaphyAclRelation obj : object) {
//Also add relation to child type for two way communication..
obj.addRelation(that);
}*/
callback.onSuccess(object);
//Calling the finally..callback
callback.onFinally();
}else{
callback.onSuccess(null);
//Calling the finally..callback
callback.onFinally();
}
}catch(Exception e){
Log.e("Snaphy", e.toString());
}
}
@Override
public void onError(Throwable t) {
//Now calling the callback
callback.onError(t);
//Calling the finally..callback
callback.onFinally();
}
});
} //method def ends here.
//Write the method here..
public void create__snaphyAclRelations( SnaphyAclRelation data, SnaphyRestAdapter restAdapter, final ObjectCallback<SnaphyAclRelation> callback) {
//Call the onBefore callback method..
callback.onBefore();
//Define methods here..
final SnaphyAclRepository snaphyAclRepo = restAdapter.createRepository(SnaphyAclRepository.class);
snaphyAclRepo.create__snaphyAclRelations( (String)that.getId(), data.convertMap(), new ObjectCallback<SnaphyAclRelation> (){
@Override
public void onSuccess(SnaphyAclRelation object) {
try{
if(object != null){
//now add relation to this recipe.
addRelation(object);
//Also add relation to child type for two way communication..Removing two way communication for cyclic error
//object.addRelation(that);
callback.onSuccess(object);
//Calling the finally..callback
callback.onFinally();
}else{
callback.onSuccess(null);
//Calling the finally..callback
callback.onFinally();
}
}catch(Exception e){
Log.e("Snaphy", e.toString());
}
}
@Override
public void onError(Throwable t) {
//Now calling the callback
callback.onError(t);
//Calling the finally..callback
callback.onFinally();
}
});
} //method def ends here.
//Write the method here..
public void delete__snaphyAclRelations( SnaphyRestAdapter restAdapter, final VoidCallback callback) {
//Call the onBefore callback method..
callback.onBefore();
//Define methods here..
final SnaphyAclRepository snaphyAclRepo = restAdapter.createRepository(SnaphyAclRepository.class);
snaphyAclRepo.delete__snaphyAclRelations( (String)that.getId(), new VoidCallback (){
@Override
public void onSuccess() {
callback.onSuccess();
//Calling the finally..callback
callback.onFinally();
}
@Override
public void onError(Throwable t) {
//Now calling the callback
callback.onError(t);
//Calling the finally..callback
callback.onFinally();
}
});
} //method def ends here.
//Write the method here..
public void count__snaphyAclRelations( Map<String, ? extends Object> where, SnaphyRestAdapter restAdapter, final ObjectCallback<JSONObject> callback) {
//Call the onBefore callback method..
callback.onBefore();
//Define methods here..
final SnaphyAclRepository snaphyAclRepo = restAdapter.createRepository(SnaphyAclRepository.class);
snaphyAclRepo.count__snaphyAclRelations( (String)that.getId(), where, new ObjectCallback<JSONObject>(){
@Override
public void onSuccess(JSONObject object) {
try{
callback.onSuccess(object);
//Calling the finally..callback
callback.onFinally();
}catch(Exception e){
Log.e("Snaphy", e.toString());
}
}
@Override
public void onError(Throwable t) {
//Now calling the callback
callback.onError(t);
//Calling the finally..callback
callback.onFinally();
}
});
} //method def ends here.
//Define hasMany, hasManyThrough method here..
}
| |
package com.github.ddth.queue.impl;
import com.github.ddth.kafka.KafkaClient;
import com.github.ddth.kafka.KafkaClient.ProducerType;
import com.github.ddth.kafka.KafkaMessage;
import com.github.ddth.queue.IPartitionSupport;
import com.github.ddth.queue.IQueue;
import com.github.ddth.queue.IQueueMessage;
import com.github.ddth.queue.utils.QueueException;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Collection;
import java.util.Properties;
import java.util.concurrent.TimeUnit;
/**
* (Experimental) Kafka implementation of {@link IQueue}.
*
* <ul>
* <li>Queue-size support: no</li>
* <li>Ephemeral storage support: no</li>
* </ul>
*
* @author Thanh Ba Nguyen <bnguyen2k@gmail.com>
* @since 0.3.2
*/
public abstract class KafkaQueue<ID, DATA> extends AbstractQueue<ID, DATA> {
    private final Logger LOGGER = LoggerFactory.getLogger(KafkaQueue.class);
    public final static String DEFAULT_BOOTSTRAP_SERVERS = "localhost:9092";
    public final static String DEFAULT_TOPIC_NAME = "ddth-queue";
    public final static ProducerType DEFAULT_PRODUCER_TYPE = ProducerType.LEADER_ACK;
    public final static boolean DEFAULT_SEND_ASYNC = true;
    public final static String DEFAULT_CONSUMER_GROUP_ID = "ddth-queue";
    private KafkaClient kafkaClient;
    // True when this instance created (and must destroy) the KafkaClient.
    private boolean myOwnKafkaClient = true;
    private String bootstrapServers = DEFAULT_BOOTSTRAP_SERVERS;
    private String topicName = DEFAULT_TOPIC_NAME;
    // NOTE(review): timestamp suffix gives each instance a unique consumer
    // group by default — presumably intentional so instances do not share
    // offsets; confirm before relying on cross-restart continuity.
    private String consumerGroupId = DEFAULT_CONSUMER_GROUP_ID + System.currentTimeMillis();
    private ProducerType producerType = DEFAULT_PRODUCER_TYPE;
    private Properties producerProps, consumerProps;
    private boolean sendAsync = DEFAULT_SEND_ASYNC;
    /**
     * Should messages sent to Kafka asynchronously (default {@code true})?
     *
     * @return current async-send flag
     * @since 0.5.0
     */
    public boolean isSendAsync() {
        return sendAsync;
    }
    /**
     * Should messages sent to Kafka asynchronously (default {@code true})?
     *
     * @param value new async-send flag
     * @return this instance for chaining
     */
    public KafkaQueue<ID, DATA> setSendAsync(boolean value) {
        this.sendAsync = value;
        return this;
    }
    /**
     * Kafka's producer type, used to send messages (default {@link ProducerType#LEADER_ACK}).
     *
     * @return current producer type
     */
    public ProducerType getProducerType() {
        return producerType;
    }
    /**
     * Kafka's producer type, used to send messages (default {@link ProducerType#LEADER_ACK}).
     *
     * @param producerType producer type to use
     * @return this instance for chaining
     */
    public KafkaQueue<ID, DATA> setProducerType(ProducerType producerType) {
        this.producerType = producerType;
        return this;
    }
    /**
     * Kafka bootstrap server list (format {@code host1:9092,host2:port2,host3:port3}).
     *
     * @return current bootstrap server list
     * @since 0.4.0
     */
    public String getKafkaBootstrapServers() {
        return bootstrapServers;
    }
    /**
     * Kafka bootstrap server list (format {@code host1:9092,host2:port2,host3:port3}).
     *
     * @param kafkaBootstrapServers bootstrap server list
     * @return this instance for chaining
     * @since 0.4.0
     */
    public KafkaQueue<ID, DATA> setKafkaBootstrapServers(String kafkaBootstrapServers) {
        this.bootstrapServers = kafkaBootstrapServers;
        return this;
    }
    /**
     * Custom configuration properties for Kafka producer.
     *
     * @return producer properties, may be {@code null}
     * @since 0.4.0
     */
    public Properties getKafkaProducerProperties() {
        return producerProps;
    }
    /**
     * Custom configuration properties for Kafka producer.
     *
     * @param kafkaProducerConfigs producer properties
     * @return this instance for chaining
     * @since 0.4.0
     */
    public KafkaQueue<ID, DATA> setKafkaProducerProperties(Properties kafkaProducerConfigs) {
        this.producerProps = kafkaProducerConfigs;
        return this;
    }
    /**
     * Custom configuration properties for Kafka consumer.
     *
     * @return consumer properties, may be {@code null}
     * @since 0.4.0
     */
    public Properties getKafkaConsumerProperties() {
        return consumerProps;
    }
    /**
     * Custom configuration properties for Kafka consumer.
     *
     * @param kafkaConsumerConfigs consumer properties
     * @return this instance for chaining
     * @since 0.4.0
     */
    public KafkaQueue<ID, DATA> setKafkaConsumerProperties(Properties kafkaConsumerConfigs) {
        this.consumerProps = kafkaConsumerConfigs;
        return this;
    }
    /**
     * Name of Kafka topic to store queue messages.
     *
     * @return current topic name
     */
    public String getTopicName() {
        return topicName;
    }
    /**
     * Name of Kafka topic to store queue messages.
     *
     * @param topicName topic name
     * @return this instance for chaining
     */
    public KafkaQueue<ID, DATA> setTopicName(String topicName) {
        this.topicName = topicName;
        return this;
    }
    /**
     * Kafka group-id to consume messages.
     *
     * @return current consumer group-id
     */
    public String getConsumerGroupId() {
        return consumerGroupId;
    }
    /**
     * Kafka group-id to consume messages.
     *
     * @param consumerGroupId consumer group-id
     * @return this instance for chaining
     */
    public KafkaQueue<ID, DATA> setConsumerGroupId(String consumerGroupId) {
        this.consumerGroupId = consumerGroupId;
        return this;
    }
    /**
     * An external {@link KafkaClient} can be used. If not set,
     * {@link KafkaQueue} will automatically create a {@link KafkaClient} for
     * its own use.
     *
     * @return the client in use, may be {@code null} before {@link #init()}
     */
    protected KafkaClient getKafkaClient() {
        return kafkaClient;
    }
    /**
     * An external {@link KafkaClient} can be used. If not set,
     * {@link KafkaQueue} will automatically create a {@link KafkaClient} for
     * its own use.
     *
     * @param kafkaClient client to use
     * @param setMyOwnKafkaClient {@code true} if this queue owns (and must destroy) the client
     * @return this instance for chaining
     */
    protected KafkaQueue<ID, DATA> setKafkaClient(KafkaClient kafkaClient, boolean setMyOwnKafkaClient) {
        // Destroy a previously-owned client before swapping it out.
        if (this.kafkaClient != null && myOwnKafkaClient) {
            this.kafkaClient.destroy();
        }
        this.kafkaClient = kafkaClient;
        myOwnKafkaClient = setMyOwnKafkaClient;
        return this;
    }
    /**
     * An external {@link KafkaClient} can be used. If not set,
     * {@link KafkaQueue} will automatically create a {@link KafkaClient} for
     * its own use.
     *
     * @param kafkaClient externally-managed client (this queue will not destroy it)
     * @return this instance for chaining
     */
    public KafkaQueue<ID, DATA> setKafkaClient(KafkaClient kafkaClient) {
        return setKafkaClient(kafkaClient, false);
    }
    /*----------------------------------------------------------------------*/
    /**
     * Builds and initializes a new {@link KafkaClient} from the configured
     * bootstrap servers and producer/consumer properties.
     *
     * @return an initialized client
     * @throws IllegalStateException if no bootstrap server list is configured
     */
    protected KafkaClient buildKafkaClient() {
        if (StringUtils.isBlank(bootstrapServers)) {
            throw new IllegalStateException("Kafka bootstrap server list is not defined.");
        }
        KafkaClient kafkaClient = new KafkaClient(bootstrapServers);
        // FIX: producer properties were previously set from consumerProps,
        // silently discarding any custom producer configuration.
        kafkaClient.setProducerProperties(producerProps).setConsumerProperties(consumerProps);
        kafkaClient.init();
        return kafkaClient;
    }
    /**
     * Init method.
     *
     * @return this instance for chaining
     * @throws Exception if initialization fails
     */
    public KafkaQueue<ID, DATA> init() throws Exception {
        if (kafkaClient == null) {
            // No external client supplied: build and own one.
            setKafkaClient(buildKafkaClient(), true);
        }
        super.init();
        if (kafkaClient == null) {
            throw new IllegalStateException("Kafka client is null.");
        }
        return this;
    }
    /**
     * Destroy method. Destroys the KafkaClient only when owned by this queue.
     */
    public void destroy() {
        try {
            super.destroy();
        } finally {
            if (kafkaClient != null && myOwnKafkaClient) {
                try {
                    kafkaClient.destroy();
                } catch (Exception e) {
                    LOGGER.warn(e.getMessage(), e);
                } finally {
                    kafkaClient = null;
                }
            }
        }
    }
    /**
     * Take a message from Kafka queue (blocking up to 1000ms).
     *
     * @return the deserialized message, or {@code null} if none arrived in time
     * @since 0.3.3
     */
    protected IQueueMessage<ID, DATA> takeFromQueue() {
        KafkaMessage kMsg = kafkaClient.consumeMessage(consumerGroupId, true, topicName, 1000, TimeUnit.MILLISECONDS);
        return kMsg != null ? deserialize(kMsg.content()) : null;
    }
    /**
     * {@inheritDoc}
     * <p>{@code queueCase} is ignore as we always add new message to Kafka.</p>
     */
    @Override
    protected boolean doPutToQueue(IQueueMessage<ID, DATA> msg, PutToQueueCase queueCase) {
        byte[] msgData = serialize(msg);
        // Partition key: explicit partition key if supported, else the message id.
        Object pKey = msg instanceof IPartitionSupport ? ((IPartitionSupport) msg).getPartitionKey() : msg.getId();
        if (pKey == null) {
            pKey = msg.getId();
        }
        KafkaMessage kMsg = pKey != null ?
                new KafkaMessage(topicName, pKey.toString(), msgData) :
                new KafkaMessage(topicName, msgData);
        if (sendAsync) {
            return kafkaClient.sendMessageRaw(producerType, kMsg) != null;
        } else {
            return kafkaClient.sendMessage(producerType, kMsg) != null;
        }
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public void finish(IQueueMessage<ID, DATA> msg) {
        // EMPTY: Kafka commits offsets on consume; nothing to finish.
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public IQueueMessage<ID, DATA> take() {
        return takeFromQueue();
    }
    /**
     * {@inheritDoc}
     * <p>
     * This method throws {@link QueueException.OperationNotSupported}
     */
    @Override
    public Collection<IQueueMessage<ID, DATA>> getOrphanMessages(long thresholdTimestampMs) {
        throw new QueueException.OperationNotSupported("This queue does not support retrieving orphan messages");
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public int queueSize() {
        return SIZE_NOT_SUPPORTED;
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public int ephemeralSize() {
        return SIZE_NOT_SUPPORTED;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.runners.spark.io;
import com.google.api.client.util.BackOff;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.RemovalListener;
import com.google.common.cache.RemovalNotification;
import com.google.common.util.concurrent.Uninterruptibles;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.io.BoundedSource;
import org.apache.beam.sdk.io.UnboundedSource;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.util.FluentBackoff;
import org.joda.time.Duration;
import org.joda.time.Instant;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Mostly based on {@link org.apache.beam.sdk.io.BoundedReadFromUnboundedSource},
* with some adjustments for this specific use-case.
*
* <p>A {@link BoundedSource} wrapping an {@link UnboundedSource} to complement Spark's micro-batch
* nature.
*
* <p>By design, Spark's micro-batches are bounded by their duration. Spark also provides a
* back-pressure mechanism that may signal a bound by max records.
*/
public class MicrobatchSource<T, CheckpointMarkT extends UnboundedSource.CheckpointMark>
    extends BoundedSource<T> {
  private static final Logger LOG = LoggerFactory.getLogger(MicrobatchSource.class);
  // Lazily created, process-wide reader cache; guarded by MicrobatchSource.class.
  private static volatile Cache<MicrobatchSource<?, ?>, BoundedReader<?>> readerCache;
  private final UnboundedSource<T, CheckpointMarkT> source;
  private final Duration maxReadTime;
  private final int numInitialSplits;
  private final long maxNumRecords;
  private final int sourceId;
  private final double readerCacheInterval;
  // each split of the underlying UnboundedSource is associated with a (consistent) id
  // to match it's corresponding CheckpointMark state.
  private final int splitId;
  MicrobatchSource(
      UnboundedSource<T, CheckpointMarkT> source,
      Duration maxReadTime,
      int numInitialSplits,
      long maxNumRecords,
      int splitId,
      int sourceId,
      double readerCacheInterval) {
    this.source = source;
    this.maxReadTime = maxReadTime;
    this.numInitialSplits = numInitialSplits;
    this.maxNumRecords = maxNumRecords;
    this.splitId = splitId;
    this.sourceId = sourceId;
    this.readerCacheInterval = readerCacheInterval;
  }
  /**
   * Divide the given number of records into {@code numSplits} approximately
   * equal parts that sum to {@code numRecords}.
   */
  private static long[] splitNumRecords(long numRecords, int numSplits) {
    long[] splitNumRecords = new long[numSplits];
    for (int i = 0; i < numSplits; i++) {
      splitNumRecords[i] = numRecords / numSplits;
    }
    // Distribute the remainder one record at a time over the first splits.
    for (int i = 0; i < numRecords % numSplits; i++) {
      splitNumRecords[i] = splitNumRecords[i] + 1;
    }
    return splitNumRecords;
  }
  @Override
  public List<? extends BoundedSource<T>> split(
      long desiredBundleSizeBytes, PipelineOptions options) throws Exception {
    List<MicrobatchSource<T, CheckpointMarkT>> result = new ArrayList<>();
    List<? extends UnboundedSource<T, CheckpointMarkT>> splits =
        source.split(numInitialSplits, options);
    int numSplits = splits.size();
    long[] numRecords = splitNumRecords(maxNumRecords, numSplits);
    for (int i = 0; i < numSplits; i++) {
      // splits must be stable, and cannot change during consecutive executions
      // for example: Kafka should not add partitions if more then one topic is read.
      result.add(new MicrobatchSource<>(splits.get(i), maxReadTime, 1, numRecords[i], i, sourceId,
                                        readerCacheInterval));
    }
    return result;
  }
  @Override
  public long getEstimatedSizeBytes(PipelineOptions options) throws Exception {
    // Size estimation is not meaningful for this micro-batch wrapper.
    return 0;
  }
  @Override
  public BoundedReader<T> createReader(PipelineOptions options) throws IOException {
    return getOrCreateReader(options, null);
  }
  @SuppressWarnings("unchecked")
  public BoundedReader<T> getOrCreateReader(
      PipelineOptions options,
      CheckpointMarkT checkpointMark) throws IOException {
    try {
      initReaderCache((long) readerCacheInterval);
      return (BoundedReader<T>) readerCache.get(this, new ReaderLoader(options, checkpointMark));
    } catch (ExecutionException e) {
      throw new RuntimeException("Failed to get or create reader", e);
    }
  }
  private static synchronized void initReaderCache(long readerCacheInterval) {
    if (readerCache == null) {
      LOG.info("Creating reader cache. Cache interval = " + readerCacheInterval + " ms.");
      readerCache =
          CacheBuilder.newBuilder()
              .expireAfterAccess(readerCacheInterval, TimeUnit.MILLISECONDS)
              .removalListener(new ReaderCacheRemovalListener())
              .build();
    }
  }
  @Override
  public void validate() {
    source.validate();
  }
  @Override
  public Coder<T> getDefaultOutputCoder() {
    return source.getDefaultOutputCoder();
  }
  public Coder<CheckpointMarkT> getCheckpointMarkCoder() {
    return source.getCheckpointMarkCoder();
  }
  public String getId() {
    return sourceId + "_" + splitId;
  }
  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (!(o instanceof MicrobatchSource)) {
      return false;
    }
    MicrobatchSource<?, ?> that = (MicrobatchSource<?, ?>) o;
    if (sourceId != that.sourceId) {
      return false;
    }
    return splitId == that.splitId;
  }
  @Override
  public int hashCode() {
    int result = sourceId;
    result = 31 * result + splitId;
    return result;
  }
  /**
   * A {@link BoundedSource.BoundedReader}
   * wrapping an {@link UnboundedSource.UnboundedReader}.
   *
   * <p>This Reader will read until it reached the bound of duration, or max records,
   * whichever comes first.
   */
  public class Reader extends BoundedSource.BoundedReader<T> {
    private long recordsRead = 0L;
    private Instant endTime;
    private final FluentBackoff backoffFactory;
    private final UnboundedSource.UnboundedReader<T> reader;
    private boolean started;
    private Reader(UnboundedSource.UnboundedReader<T> reader) {
      this.reader = reader;
      // Cap both individual and cumulative backoff just under maxReadTime so a
      // fully-backed-off read still finishes within the micro-batch.
      backoffFactory =
          FluentBackoff.DEFAULT
              .withInitialBackoff(Duration.millis(10))
              .withMaxBackoff(maxReadTime.minus(1))
              .withMaxCumulativeBackoff(maxReadTime.minus(1));
    }
    @Override
    public boolean start() throws IOException {
      LOG.debug("MicrobatchReader-{}: Starting a microbatch read from an unbounded source with a "
          + "max read time of {} msec, and max number of records {}.", splitId, maxReadTime,
                maxNumRecords);
      endTime = Instant.now().plus(maxReadTime);
      // Since reader is reused in microbatches only start it if it has not already been started.
      if (!started) {
        started = true;
        if (reader.start()) {
          recordsRead++;
          return true;
        }
      }
      return advanceWithBackoff();
    }
    @Override
    public boolean advance() throws IOException {
      if (recordsRead >= maxNumRecords) {
        // Reached the record bound: persist progress and stop this batch.
        finalizeCheckpoint();
        return false;
      }
      return advanceWithBackoff();
    }
    private boolean advanceWithBackoff() throws IOException {
      // Try reading from the source with exponential backoff
      BackOff backoff = backoffFactory.backoff();
      long nextSleep = backoff.nextBackOffMillis();
      while (nextSleep != BackOff.STOP) {
        if (endTime != null && Instant.now().isAfter(endTime)) {
          // Duration bound reached.
          finalizeCheckpoint();
          return false;
        }
        if (reader.advance()) {
          recordsRead++;
          return true;
        }
        Uninterruptibles.sleepUninterruptibly(nextSleep, TimeUnit.MILLISECONDS);
        nextSleep = backoff.nextBackOffMillis();
      }
      // Backoff exhausted without new data.
      finalizeCheckpoint();
      return false;
    }
    private void finalizeCheckpoint() throws IOException {
      reader.getCheckpointMark().finalizeCheckpoint();
      LOG.debug("MicrobatchReader-{}: finalized CheckpointMark successfully after "
          + "reading {} records.", splitId, recordsRead);
    }
    @Override
    public T getCurrent() throws NoSuchElementException {
      return reader.getCurrent();
    }
    @Override
    public Instant getCurrentTimestamp() throws NoSuchElementException {
      return reader.getCurrentTimestamp();
    }
    @Override
    public void close() throws IOException {
      reader.close();
    }
    @Override
    public BoundedSource<T> getCurrentSource() {
      return MicrobatchSource.this;
    }
    @SuppressWarnings("unchecked")
    public CheckpointMarkT getCheckpointMark() {
      return (CheckpointMarkT) reader.getCheckpointMark();
    }
    public Instant getWatermark() {
      return reader.getWatermark();
    }
  }
  /**
   * {@link Callable} which creates a {@link Reader}.
   */
  private class ReaderLoader implements Callable<BoundedReader<T>> {
    private final PipelineOptions options;
    private final CheckpointMarkT checkpointMark;
    ReaderLoader(PipelineOptions options, CheckpointMarkT checkpointMark) {
      this.options = options;
      this.checkpointMark = checkpointMark;
    }
    @Override
    public BoundedReader<T> call() throws Exception {
      LOG.info("No cached reader found for split: [" + source
          + "]. Creating new reader at checkpoint mark " + checkpointMark);
      return new Reader(source.createReader(options, checkpointMark));
    }
  }
  /**
   * Listener to be called when a reader is removed from {@link MicrobatchSource#readerCache}.
   */
  private static class ReaderCacheRemovalListener
      implements RemovalListener<MicrobatchSource<?, ?>, BoundedReader<?>> {
    @Override public void onRemoval(
        RemovalNotification<MicrobatchSource<?, ?>, BoundedReader<?>> notification) {
      try {
        notification.getValue().close();
      } catch (IOException e) {
        throw new RuntimeException(e);
      }
    }
  }
  @VisibleForTesting
  public static void clearCache() {
    // Same monitor as initReaderCache (which is static synchronized on this class).
    synchronized (MicrobatchSource.class) {
      // FIX: guard against NPE when the cache was never initialized.
      if (readerCache != null) {
        readerCache.invalidateAll();
      }
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache.distributed;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.internal.processors.cache.transactions.IgniteTxEntry;
import org.apache.ignite.internal.processors.cache.transactions.IgniteTxKey;
import org.apache.ignite.internal.processors.cache.version.GridCacheVersion;
import org.apache.ignite.internal.util.tostring.GridToStringExclude;
import org.apache.ignite.internal.util.tostring.GridToStringInclude;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.internal.CU;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.jetbrains.annotations.Nullable;
/**
* Transaction node mapping.
*/
public class GridDistributedTxMapping {
    /** Atomically initializes {@link #backups} on first use without locking (see {@link #addBackups}). */
    private static final AtomicReferenceFieldUpdater<GridDistributedTxMapping, Set> BACKUPS_FIELD_UPDATER
        = AtomicReferenceFieldUpdater.newUpdater(GridDistributedTxMapping.class, Set.class, "backups");

    /** Mapped node. */
    @GridToStringExclude
    private ClusterNode primary;

    /** Mapped backup nodes. Lazily created thread-safe set; {@code null} until first {@link #addBackups}. */
    private volatile Set<UUID> backups;

    /** Entries. Insertion-ordered; near-cache entries are counted in {@link #nearEntries}. */
    @GridToStringInclude
    private final Collection<IgniteTxEntry> entries;

    /** Explicit lock flag. */
    private boolean explicitLock;

    /** Query update flag. */
    private boolean queryUpdate;

    /** DHT version. */
    private GridCacheVersion dhtVer;

    /** {@code True} if this is last mapping for node. */
    private boolean last;

    /** Near cache entries count. */
    private int nearEntries;

    /** {@code True} if this is first mapping for optimistic tx on client node. */
    private boolean clientFirst;

    /**
     * @param primary Primary node.
     */
    public GridDistributedTxMapping(ClusterNode primary) {
        this.primary = primary;

        entries = new LinkedHashSet<>();
    }

    /**
     * @return {@code True} if this is last mapping for node.
     */
    public boolean last() {
        return last;
    }

    /**
     * @param last If {@code True} this is last mapping for node.
     */
    public void last(boolean last) {
        this.last = last;
    }

    /**
     * @return {@code True} if this is first mapping for optimistic tx on client node.
     */
    public boolean clientFirst() {
        return clientFirst;
    }

    /**
     * @param clientFirst {@code True} if this is first mapping for optimistic tx on client node.
     */
    public void clientFirst(boolean clientFirst) {
        this.clientFirst = clientFirst;
    }

    /**
     * @return {@code True} if has colocated cache entries (i.e. more entries than the near-entry count).
     */
    public boolean hasColocatedCacheEntries() {
        return entries.size() > nearEntries;
    }

    /**
     * @return {@code True} if has near cache entries.
     */
    public boolean hasNearCacheEntries() {
        return nearEntries > 0;
    }

    /**
     * @return Node.
     */
    public ClusterNode primary() {
        return primary;
    }

    /**
     * @return Entries. Live collection — callers must not assume a snapshot.
     */
    public Collection<IgniteTxEntry> entries() {
        return entries;
    }

    /**
     * @return Near cache entries (filtered view over {@link #entries}).
     */
    @Nullable public Collection<IgniteTxEntry> nearCacheEntries() {
        assert nearEntries > 0;

        return F.view(entries, CU.FILTER_NEAR_CACHE_ENTRY);
    }

    /**
     * @return {@code True} if mapping was created for a query update.
     */
    public boolean queryUpdate() {
        return queryUpdate;
    }

    /**
     * Sets query update flag to {@code true}.
     */
    public void markQueryUpdate() {
        queryUpdate = true;
    }

    /**
     * @return {@code True} if lock is explicit.
     */
    public boolean explicitLock() {
        return explicitLock;
    }

    /**
     * Sets explicit flag to {@code true}.
     */
    public void markExplicitLock() {
        explicitLock = true;
    }

    /**
     * @return DHT version.
     */
    public GridCacheVersion dhtVersion() {
        return dhtVer;
    }

    /**
     * Records the DHT version for this mapping and stamps the write version on every entry.
     *
     * @param dhtVer DHT version.
     * @param writeVer DHT writeVersion.
     */
    public void dhtVersion(GridCacheVersion dhtVer, GridCacheVersion writeVer) {
        this.dhtVer = dhtVer;

        for (IgniteTxEntry e : entries)
            e.dhtVersion(writeVer);
    }

    /**
     * @return Reads (filtered view over {@link #entries}).
     */
    public Collection<IgniteTxEntry> reads() {
        return F.view(entries, CU.READ_FILTER);
    }

    /**
     * @return Writes (filtered view over {@link #entries}).
     */
    public Collection<IgniteTxEntry> writes() {
        return F.view(entries, CU.WRITE_FILTER);
    }

    /**
     * @return Near cache reads.
     */
    public Collection<IgniteTxEntry> nearEntriesReads() {
        assert hasNearCacheEntries();

        return F.view(entries, CU.READ_FILTER_NEAR);
    }

    /**
     * @return Near cache writes.
     */
    public Collection<IgniteTxEntry> nearEntriesWrites() {
        assert hasNearCacheEntries();

        return F.view(entries, CU.WRITE_FILTER_NEAR);
    }

    /**
     * @return Colocated cache reads.
     */
    public Collection<IgniteTxEntry> colocatedEntriesReads() {
        assert hasColocatedCacheEntries();

        return F.view(entries, CU.READ_FILTER_COLOCATED);
    }

    /**
     * @return Colocated cache writes.
     */
    public Collection<IgniteTxEntry> colocatedEntriesWrites() {
        assert hasColocatedCacheEntries();

        return F.view(entries, CU.WRITE_FILTER_COLOCATED);
    }

    /**
     * Adds entry, maintaining the near-entry counter for newly added near-cache entries.
     *
     * @param entry Adds entry.
     */
    public void add(IgniteTxEntry entry) {
        if (entries.add(entry) && entry.context().isNear())
            nearEntries++;
    }

    /**
     * @param entry Entry to remove.
     * @return {@code True} if entry was removed.
     */
    public boolean removeEntry(IgniteTxEntry entry) {
        return entries.remove(entry);
    }

    /**
     * @param keys Keys to evict readers for.
     */
    public void evictReaders(@Nullable Collection<IgniteTxKey> keys) {
        if (keys == null || keys.isEmpty())
            return;

        evictReaders(keys, entries);
    }

    /**
     * Removes from {@code entries} every entry whose key is in {@code keys}.
     *
     * @param keys Keys to evict readers for.
     * @param entries Entries to check.
     */
    private void evictReaders(Collection<IgniteTxKey> keys, @Nullable Collection<IgniteTxEntry> entries) {
        if (entries == null || entries.isEmpty())
            return;

        for (Iterator<IgniteTxEntry> it = entries.iterator(); it.hasNext();) {
            IgniteTxEntry entry = it.next();

            if (keys.contains(entry.txKey()))
                it.remove();
        }
    }

    /**
     * Whether empty or not.
     *
     * @return Empty or not.
     */
    public boolean empty() {
        return entries.isEmpty();
    }

    /**
     * @param newBackups Backups to be added to this mapping.
     */
    public void addBackups(Collection<UUID> newBackups) {
        if (newBackups == null)
            return;

        // Lazily initialize the backup set. CAS guarantees exactly one shared
        // instance even when multiple threads race here; the set itself is
        // backed by a ConcurrentHashMap, so the subsequent addAll is safe.
        if (backups == null)
            BACKUPS_FIELD_UPDATER.compareAndSet(this, null, Collections.newSetFromMap(new ConcurrentHashMap<>()));

        backups.addAll(newBackups);
    }

    /**
     * @return Mapped backup nodes (empty set when none were registered).
     */
    public Set<UUID> backups() {
        return backups != null ? backups : Collections.emptySet();
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        return S.toString(GridDistributedTxMapping.class, this, "node", primary.id());
    }
}
| |
package com.redbugz.maf.jdom;
import java.util.HashMap;
import java.util.Map;
import java.util.StringTokenizer;
import org.apache.log4j.Logger;
import org.jdom.Element;
import com.redbugz.maf.Place;
/**
 * JDOM-backed implementation of {@link Place}. A place string is a
 * comma-separated list of up to four jurisdiction levels, most specific
 * first (e.g. "Provo, Utah County, Utah, USA"). The backing JDOM element's
 * text mirrors the formatted place string.
 */
public class PlaceJDOM implements Place {
    private static final Logger log = Logger.getLogger(PlaceJDOM.class);

    /** GEDCOM tag name for a place element. */
    public static final String PLACE = "PLAC";

    /** Full US state names mapped to their two-letter postal abbreviations. */
    private static final Map<String, String> stateAbbreviations = new HashMap<String, String>(60);

    static {
        stateAbbreviations.put("Alabama", "AL");
        stateAbbreviations.put("Alaska", "AK");
        stateAbbreviations.put("Arizona", "AZ");
        stateAbbreviations.put("Arkansas", "AR");
        stateAbbreviations.put("California", "CA");
        stateAbbreviations.put("Colorado", "CO");
        stateAbbreviations.put("Connecticut", "CT");
        stateAbbreviations.put("Delaware", "DE");
        stateAbbreviations.put("District of Columbia", "DC");
        stateAbbreviations.put("Florida", "FL");
        stateAbbreviations.put("Georgia", "GA");
        stateAbbreviations.put("Hawaii", "HI");
        stateAbbreviations.put("Idaho", "ID");
        stateAbbreviations.put("Illinois", "IL");
        stateAbbreviations.put("Indiana", "IN");
        stateAbbreviations.put("Iowa", "IA");
        stateAbbreviations.put("Kansas", "KS");
        stateAbbreviations.put("Kentucky", "KY");
        stateAbbreviations.put("Louisiana", "LA");
        stateAbbreviations.put("Maine", "ME");
        stateAbbreviations.put("Maryland", "MD");
        stateAbbreviations.put("Massachusetts", "MA");
        stateAbbreviations.put("Michigan", "MI");
        stateAbbreviations.put("Minnesota", "MN");
        stateAbbreviations.put("Mississippi", "MS");
        stateAbbreviations.put("Missouri", "MO");
        stateAbbreviations.put("Montana", "MT");
        stateAbbreviations.put("Nebraska", "NE");
        stateAbbreviations.put("Nevada", "NV");
        stateAbbreviations.put("New Hampshire", "NH");
        stateAbbreviations.put("New Jersey", "NJ");
        stateAbbreviations.put("New Mexico", "NM");
        stateAbbreviations.put("New York", "NY");
        stateAbbreviations.put("North Carolina", "NC");
        stateAbbreviations.put("North Dakota", "ND");
        stateAbbreviations.put("Ohio", "OH");
        stateAbbreviations.put("Oklahoma", "OK");
        stateAbbreviations.put("Oregon", "OR");
        stateAbbreviations.put("Pennsylvania", "PA");
        stateAbbreviations.put("Rhode Island", "RI");
        stateAbbreviations.put("South Carolina", "SC");
        stateAbbreviations.put("South Dakota", "SD");
        stateAbbreviations.put("Tennessee", "TN");
        stateAbbreviations.put("Texas", "TX");
        stateAbbreviations.put("Utah", "UT");
        stateAbbreviations.put("Vermont", "VT");
        stateAbbreviations.put("Virginia", "VA");
        stateAbbreviations.put("Washington", "WA");
        stateAbbreviations.put("West Virginia", "WV");
        stateAbbreviations.put("Wisconsin", "WI");
        stateAbbreviations.put("Wyoming", "WY");
    }

    // Jurisdiction levels, most specific (level1) to least specific (level4); never null.
    protected String level1 = "";
    protected String level2 = "";
    protected String level3 = "";
    protected String level4 = "";

    // Backing JDOM element; its text mirrors the formatted place string.
    protected Element element = new Element(PLACE);

    /** Creates an empty place. */
    public PlaceJDOM() {
    }

    /**
     * Creates a place from a comma-separated place string.
     *
     * @param placeString place string, e.g. "Provo, Utah County, Utah, USA"
     */
    public PlaceJDOM(String placeString) {
        // setPlaceString() also stores the raw text on the element, so the
        // previous duplicate element.setText() call was removed.
        setPlaceString(placeString);
    }

    /**
     * Creates a place backed by an existing JDOM element.
     *
     * @param element source element; if null, a new empty PLAC element is used
     */
    public PlaceJDOM(Element element) {
        if (element == null) {
            element = new Element(PLACE);
        }
        this.element = element;
        setPlaceString(this.element.getText());
        // log.debug("MyPlace() place=" + getFormatString());
    }

    /**
     * Copy constructor.
     *
     * @param oldPlace place to copy; if it is a PlaceJDOM, its element is shared
     */
    public PlaceJDOM(Place oldPlace) {
        if (oldPlace instanceof PlaceJDOM) {
            element = ((PlaceJDOM) oldPlace).getElement();
            // Also parse the element text so level1-level4 are populated
            // (previously this branch left all levels empty).
            setPlaceString(element.getText());
        }
        else {
            setPlaceString(oldPlace.getFormatString());
        }
    }

    /**
     * @return the backing JDOM element (shared, not a copy)
     */
    public Element getElement() {
        return element;
    }

    /**
     * Stores the raw place string on the element and parses up to four
     * comma-separated jurisdiction levels from it. Missing trailing levels
     * are left at their current values (normally "").
     *
     * @param placeString place string; may be null or have fewer than four levels
     */
    private void setPlaceString(String placeString) {
        try {
            element.setText(placeString);
            if (placeString == null) {
                return;
            }
            // Parse only as many levels as are present instead of relying on
            // NoSuchElementException for control flow as before.
            StringTokenizer st = new StringTokenizer(placeString, ",");
            if (st.hasMoreTokens()) {
                level1 = st.nextToken().trim();
            }
            if (st.hasMoreTokens()) {
                level2 = st.nextToken().trim();
            }
            if (st.hasMoreTokens()) {
                level3 = st.nextToken().trim();
            }
            if (st.hasMoreTokens()) {
                level4 = st.nextToken().trim();
            }
        }
        catch (Exception e) {
            // Keep whatever levels were parsed; log instead of silently swallowing.
            log.debug("Could not parse place string: " + placeString, e);
        }
    }

    public String getLevel1() {
        return level1;
    }

    public String getLevel2() {
        return level2;
    }

    public String getLevel3() {
        return level3;
    }

    public String getLevel4() {
        return level4;
    }

    /**
     * Formats the four levels back into a comma-separated place string.
     * Empty inner levels are preserved as empty slots (e.g. "Provo, , Utah, USA"),
     * which keeps jurisdiction positions intact — presumably intentional for
     * GEDCOM-style data; confirm before changing.
     *
     * @return formatted place string
     */
    public String getFormatString() {
        String result = getLevel4().trim();
        if (result.length() > 0) {
            result = ", " + result;
        }
        result = getLevel3().trim() + result;
        if (result.length() > 0) {
            result = ", " + result;
        }
        result = getLevel2().trim() + result;
        if (result.length() > 0) {
            result = ", " + result;
        }
        result = getLevel1().trim() + result;
        return result;
    }

    /**
     * Formats the place, progressively abbreviating or removing levels as
     * severity increases (0 = full string, MAX_SEVERITY = everything removed).
     * Unlike {@link #getFormatString()}, empty levels are skipped entirely.
     *
     * @param severity abbreviation aggressiveness, 0..MAX_SEVERITY
     * @return abbreviated place string
     */
    public String getAbbreviatedFormatString(int severity) {
        if (severity <= 0) {
            return getFormatString();
        }
        String level1Str = getLevel1().trim();
        String level2Str = getLevel2().trim();
        String level3Str = getLevel3().trim();
        String level4Str = getLevel4().trim();
        if (severity >= 1) {
            // NOTE(review): the abbreviation call is commented out, so severity 1
            // currently removes level 2 outright — identical to severity 2.
            // Behavior preserved as-is.
            level2Str = "";//getAbbreviation(level2Str);
        }
        if (severity >= 2) {
            // remove level 2
            level2Str = "";
        }
        if (severity >= 3) {
            // abbreviate level 3
            level3Str = getAbbreviation(level3Str);
        }
        if (severity >= 4) {
            // abbreviate level 4
            level4Str = getAbbreviation(level4Str);
        }
        if (severity >= 5) {
            // remove level 3
            level3Str = "";
        }
        if (severity >= 6) {
            // abbreviate level 1
            level1Str = getAbbreviation(level1Str);
        }
        if (severity >= MAX_SEVERITY) {
            // remove level 1
            level1Str = "";
        }
        String result = "";
        if (level1Str.trim().length() > 0) {
            result += level1Str.trim() + ", ";
        }
        if (level2Str.trim().length() > 0) {
            result += level2Str.trim() + ", ";
        }
        if (level3Str.trim().length() > 0) {
            result += level3Str.trim() + ", ";
        }
        if (level4Str.trim().length() > 0) {
            result += level4Str.trim() + ", ";
        }
        result = result.trim();
        if (result.endsWith(",")) {
            result = result.substring(0, result.length() - 1);
        }
        return result.trim();
    }

    /**
     * Abbreviates a place level: uses the US state abbreviation if known,
     * otherwise the initials of each whitespace-separated word. Single-letter
     * results are considered ambiguous and discarded.
     *
     * @param string the text to abbreviate, may be null or empty
     * @return the abbreviation, or "" if none could be formed
     */
    private String getAbbreviation(String string) {
        if (string == null || string.length() == 0) {
            return "";
        }
        String result = "";
        // Check for common state abbreviations first.
        if (stateAbbreviations.containsKey(string)) {
            result = stateAbbreviations.get(string);
        } else {
            StringTokenizer tok = new StringTokenizer(string, " ");
            StringBuilder initials = new StringBuilder();
            while (tok.hasMoreTokens()) {
                initials.append(tok.nextToken().charAt(0));
            }
            result = initials.toString();
        }
        if (result.length() == 1) {
            // A one-letter "abbreviation" is ambiguous; drop it.
            result = "";
        }
        return result;
    }

    /**
     * Sets level 1 (most specific); null is normalized to "" and the element
     * text is kept in sync.
     */
    public void setLevel1(String level1) {
        if (level1 == null) {
            level1 = "";
        }
        this.level1 = level1;
        element.setText(getFormatString());
    }

    /** Sets level 2; null is normalized to "" and the element text is kept in sync. */
    public void setLevel2(String level2) {
        if (level2 == null) {
            level2 = "";
        }
        this.level2 = level2;
        element.setText(getFormatString());
    }

    /** Sets level 3; null is normalized to "" and the element text is kept in sync. */
    public void setLevel3(String level3) {
        if (level3 == null) {
            level3 = "";
        }
        this.level3 = level3;
        element.setText(getFormatString());
    }

    /** Sets level 4 (least specific); null is normalized to "" and the element text is kept in sync. */
    public void setLevel4(String level4) {
        if (level4 == null) {
            level4 = "";
        }
        this.level4 = level4;
        element.setText(getFormatString());
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.resourcemanager.recovery;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.CuratorFrameworkFactory;
import org.apache.curator.retry.RetryNTimes;
import org.apache.curator.test.TestingServer;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ha.HAServiceProtocol;
import org.apache.hadoop.ha.HAServiceProtocol.StateChangeRequestInfo;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.token.delegation.DelegationKey;
import org.apache.hadoop.service.Service;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationSubmissionContextPBImpl;
import org.apache.hadoop.yarn.api.records.impl.pb.ContainerPBImpl;
import org.apache.hadoop.yarn.conf.HAUtil;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.security.client.RMDelegationTokenIdentifier;
import org.apache.hadoop.yarn.server.records.Version;
import org.apache.hadoop.yarn.server.records.impl.pb.VersionPBImpl;
import org.apache.hadoop.yarn.server.resourcemanager.MockRM;
import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
import org.apache.hadoop.yarn.server.resourcemanager.recovery.records.ApplicationAttemptStateData;
import org.apache.hadoop.yarn.server.resourcemanager.recovery.records.ApplicationStateData;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.AggregateAppResourceUsage;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptMetrics;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState;
import org.apache.hadoop.yarn.server.resourcemanager.security.ClientToAMTokenSecretManagerInRM;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.zookeeper.KeeperException;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.io.IOException;
import javax.crypto.SecretKey;
/**
 * Tests {@code ZKRMStateStore} against a real in-process ZooKeeper
 * (Curator {@code TestingServer}), including HA fencing behavior.
 */
public class TestZKRMStateStore extends RMStateStoreTestBase {

  public static final Log LOG = LogFactory.getLog(TestZKRMStateStore.class);

  private static final int ZK_TIMEOUT_MS = 1000;

  private TestingServer curatorTestingServer;
  private CuratorFramework curatorFramework;

  /** Starts an in-process ZooKeeper and a Curator client pointed at it. */
  @Before
  public void setupCuratorServer() throws Exception {
    curatorTestingServer = new TestingServer();
    curatorTestingServer.start();
    curatorFramework = CuratorFrameworkFactory.builder()
        .connectString(curatorTestingServer.getConnectString())
        .retryPolicy(new RetryNTimes(100, 100))
        .build();
    curatorFramework.start();
  }

  /** Shuts down the Curator client and then the test ZooKeeper server. */
  @After
  public void cleanupCuratorServer() throws IOException {
    curatorFramework.close();
    curatorTestingServer.stop();
  }

  /** Helper that wires a {@code ZKRMStateStore} to the test ZooKeeper server. */
  class TestZKRMStateStoreTester implements RMStateStoreHelper {

    TestZKRMStateStoreInternal store;
    String workingZnode;

    /** Store subclass exposing znode paths and version internals for assertions. */
    class TestZKRMStateStoreInternal extends ZKRMStateStore {

      public TestZKRMStateStoreInternal(Configuration conf, String workingZnode)
          throws Exception {
        init(conf);
        start();
        assertTrue(znodeWorkingPath.equals(workingZnode));
      }

      public String getVersionNode() {
        return znodeWorkingPath + "/" + ROOT_ZNODE_NAME + "/" + VERSION_NODE;
      }

      public Version getCurrentVersion() {
        return CURRENT_VERSION_INFO;
      }

      public String getAppNode(String appId) {
        return workingZnode + "/" + ROOT_ZNODE_NAME + "/" + RM_APP_ROOT + "/"
            + appId;
      }

      /**
       * Emulating retrying createRootDir not to raise NodeExist exception
       * @throws Exception
       */
      public void testRetryingCreateRootDir() throws Exception {
        create(znodeWorkingPath);
      }
    }

    public RMStateStore getRMStateStore() throws Exception {
      YarnConfiguration conf = new YarnConfiguration();
      workingZnode = "/jira/issue/3077/rmstore";
      conf.set(YarnConfiguration.RM_ZK_ADDRESS,
          curatorTestingServer.getConnectString());
      conf.set(YarnConfiguration.ZK_RM_STATE_STORE_PARENT_PATH, workingZnode);
      this.store = new TestZKRMStateStoreInternal(conf, workingZnode);
      return this.store;
    }

    @Override
    public boolean isFinalStateValid() throws Exception {
      // Only the root znode should remain under the working path.
      return 1 ==
          curatorFramework.getChildren().forPath(store.znodeWorkingPath).size();
    }

    @Override
    public void writeVersion(Version version) throws Exception {
      curatorFramework.setData().withVersion(-1)
          .forPath(store.getVersionNode(),
              ((VersionPBImpl) version).getProto().toByteArray());
    }

    @Override
    public Version getCurrentVersion() throws Exception {
      return store.getCurrentVersion();
    }

    public boolean appExists(RMApp app) throws Exception {
      return null != curatorFramework.checkExists()
          .forPath(store.getAppNode(app.getApplicationId().toString()));
    }
  }

  @Test (timeout = 60000)
  public void testZKRMStateStoreRealZK() throws Exception {
    TestZKRMStateStoreTester zkTester = new TestZKRMStateStoreTester();
    testRMAppStateStore(zkTester);
    testRMDTSecretManagerStateStore(zkTester);
    testCheckVersion(zkTester);
    testEpoch(zkTester);
    testAppDeletion(zkTester);
    testDeleteStore(zkTester);
    testRemoveApplication(zkTester);
    testAMRMTokenSecretManagerStateStore(zkTester);
    ((TestZKRMStateStoreTester.TestZKRMStateStoreInternal)
        zkTester.getRMStateStore()).testRetryingCreateRootDir();
  }

  @Test (timeout = 60000)
  public void testCheckMajorVersionChange() throws Exception {
    TestZKRMStateStoreTester zkTester = new TestZKRMStateStoreTester() {
      Version VERSION_INFO = Version.newInstance(Integer.MAX_VALUE, 0);

      @Override
      public Version getCurrentVersion() throws Exception {
        return VERSION_INFO;
      }

      @Override
      public RMStateStore getRMStateStore() throws Exception {
        YarnConfiguration conf = new YarnConfiguration();
        workingZnode = "/jira/issue/3077/rmstore";
        conf.set(YarnConfiguration.RM_ZK_ADDRESS,
            curatorTestingServer.getConnectString());
        conf.set(YarnConfiguration.ZK_RM_STATE_STORE_PARENT_PATH, workingZnode);
        this.store = new TestZKRMStateStoreInternal(conf, workingZnode) {
          Version storedVersion = null;

          @Override
          public Version getCurrentVersion() {
            return VERSION_INFO;
          }

          @Override
          protected synchronized Version loadVersion() throws Exception {
            return storedVersion;
          }

          @Override
          protected synchronized void storeVersion() throws Exception {
            storedVersion = VERSION_INFO;
          }
        };
        return this.store;
      }
    };
    // default version
    RMStateStore store = zkTester.getRMStateStore();
    Version defaultVersion = zkTester.getCurrentVersion();
    store.checkVersion();
    Assert.assertEquals(defaultVersion, store.loadVersion());
    // Close the store so it does not leak its ZK session into later tests.
    store.close();
  }

  /**
   * Builds an HA-enabled RM configuration backed by the test ZooKeeper.
   *
   * @param rmIds comma-separated RM ids (e.g. "rm1,rm2")
   * @param rmId the id of this RM instance
   * @param adminPort admin service port for this RM
   * @return the configuration
   */
  private Configuration createHARMConf(
      String rmIds, String rmId, int adminPort) {
    Configuration conf = new YarnConfiguration();
    conf.setBoolean(YarnConfiguration.RM_HA_ENABLED, true);
    conf.set(YarnConfiguration.RM_HA_IDS, rmIds);
    conf.setBoolean(YarnConfiguration.RECOVERY_ENABLED, true);
    conf.set(YarnConfiguration.RM_STORE, ZKRMStateStore.class.getName());
    conf.set(YarnConfiguration.RM_ZK_ADDRESS,
        curatorTestingServer.getConnectString());
    conf.setInt(YarnConfiguration.RM_ZK_TIMEOUT_MS, ZK_TIMEOUT_MS);
    conf.set(YarnConfiguration.RM_HA_ID, rmId);
    conf.set(YarnConfiguration.RM_WEBAPP_ADDRESS, "localhost:0");
    for (String rpcAddress : YarnConfiguration.getServiceAddressConfKeys(conf)) {
      for (String id : HAUtil.getRMHAIds(conf)) {
        conf.set(HAUtil.addSuffix(rpcAddress, id), "localhost:0");
      }
    }
    conf.set(HAUtil.addSuffix(YarnConfiguration.RM_ADMIN_ADDRESS, rmId),
        "localhost:" + adminPort);
    return conf;
  }

  @SuppressWarnings("unchecked")
  @Test
  public void testFencing() throws Exception {
    StateChangeRequestInfo req = new StateChangeRequestInfo(
        HAServiceProtocol.RequestSource.REQUEST_BY_USER);

    Configuration conf1 = createHARMConf("rm1,rm2", "rm1", 1234);
    conf1.setBoolean(YarnConfiguration.AUTO_FAILOVER_ENABLED, false);
    ResourceManager rm1 = new MockRM(conf1);
    ResourceManager rm2 = null;
    try {
      rm1.start();
      rm1.getRMContext().getRMAdminService().transitionToActive(req);
      assertEquals("RM with ZKStore didn't start",
          Service.STATE.STARTED, rm1.getServiceState());
      assertEquals("RM should be Active",
          HAServiceProtocol.HAServiceState.ACTIVE,
          rm1.getRMContext().getRMAdminService().getServiceStatus().getState());

      Configuration conf2 = createHARMConf("rm1,rm2", "rm2", 5678);
      conf2.setBoolean(YarnConfiguration.AUTO_FAILOVER_ENABLED, false);
      rm2 = new MockRM(conf2);
      rm2.start();
      rm2.getRMContext().getRMAdminService().transitionToActive(req);
      assertEquals("RM with ZKStore didn't start",
          Service.STATE.STARTED, rm2.getServiceState());
      assertEquals("RM should be Active",
          HAServiceProtocol.HAServiceState.ACTIVE,
          rm2.getRMContext().getRMAdminService().getServiceStatus().getState());

      // Poll (up to ZK_TIMEOUT_MS overall) until rm1 notices it lost the
      // store lock and transitions to standby.
      for (int i = 0; i < ZK_TIMEOUT_MS / 50; i++) {
        if (HAServiceProtocol.HAServiceState.ACTIVE ==
            rm1.getRMContext().getRMAdminService().getServiceStatus().getState()) {
          Thread.sleep(100);
        }
      }
      assertEquals("RM should have been fenced",
          HAServiceProtocol.HAServiceState.STANDBY,
          rm1.getRMContext().getRMAdminService().getServiceStatus().getState());
      assertEquals("RM should be Active",
          HAServiceProtocol.HAServiceState.ACTIVE,
          rm2.getRMContext().getRMAdminService().getServiceStatus().getState());
    } finally {
      // Stop both RMs so they don't leak threads/ZK sessions into later tests
      // (previously they were never stopped).
      rm1.stop();
      if (rm2 != null) {
        rm2.stop();
      }
    }
  }

  @Test
  public void testFencedState() throws Exception {
    TestZKRMStateStoreTester zkTester = new TestZKRMStateStoreTester();
    RMStateStore store = zkTester.getRMStateStore();

    // Move state to FENCED from ACTIVE
    store.updateFencedState();
    assertEquals("RMStateStore should have been in fenced state",
        true, store.isFencedState());

    long submitTime = System.currentTimeMillis();
    long startTime = submitTime + 1000;

    // Add a new app
    RMApp mockApp = mock(RMApp.class);
    ApplicationSubmissionContext context =
        new ApplicationSubmissionContextPBImpl();
    when(mockApp.getSubmitTime()).thenReturn(submitTime);
    when(mockApp.getStartTime()).thenReturn(startTime);
    when(mockApp.getApplicationSubmissionContext()).thenReturn(context);
    when(mockApp.getUser()).thenReturn("test");
    store.storeNewApplication(mockApp);
    assertEquals("RMStateStore should have been in fenced state",
        true, store.isFencedState());

    // Add a new attempt
    ClientToAMTokenSecretManagerInRM clientToAMTokenMgr =
        new ClientToAMTokenSecretManagerInRM();
    ApplicationAttemptId attemptId = ConverterUtils
        .toApplicationAttemptId("appattempt_1234567894321_0001_000001");
    SecretKey clientTokenMasterKey =
        clientToAMTokenMgr.createMasterKey(attemptId);
    RMAppAttemptMetrics mockRmAppAttemptMetrics =
        mock(RMAppAttemptMetrics.class);
    Container container = new ContainerPBImpl();
    container.setId(ConverterUtils.toContainerId("container_1234567891234_0001_01_000001"));
    RMAppAttempt mockAttempt = mock(RMAppAttempt.class);
    when(mockAttempt.getAppAttemptId()).thenReturn(attemptId);
    when(mockAttempt.getMasterContainer()).thenReturn(container);
    when(mockAttempt.getClientTokenMasterKey())
        .thenReturn(clientTokenMasterKey);
    when(mockAttempt.getRMAppAttemptMetrics())
        .thenReturn(mockRmAppAttemptMetrics);
    when(mockRmAppAttemptMetrics.getAggregateAppResourceUsage())
        .thenReturn(new AggregateAppResourceUsage(0,0));
    store.storeNewApplicationAttempt(mockAttempt);
    assertEquals("RMStateStore should have been in fenced state",
        true, store.isFencedState());

    long finishTime = submitTime + 1000;
    // Update attempt
    ApplicationAttemptStateData newAttemptState =
        ApplicationAttemptStateData.newInstance(attemptId, container,
            store.getCredentialsFromAppAttempt(mockAttempt),
            startTime, RMAppAttemptState.FINISHED, "testUrl",
            "test", FinalApplicationStatus.SUCCEEDED, 100,
            finishTime, 0, 0);
    store.updateApplicationAttemptState(newAttemptState);
    assertEquals("RMStateStore should have been in fenced state",
        true, store.isFencedState());

    // Update app
    ApplicationStateData appState = ApplicationStateData.newInstance(submitTime,
        startTime, context, "test");
    store.updateApplicationState(appState);
    assertEquals("RMStateStore should have been in fenced state",
        true, store.isFencedState());

    // Remove app
    store.removeApplication(mockApp);
    assertEquals("RMStateStore should have been in fenced state",
        true, store.isFencedState());

    // store RM delegation token;
    RMDelegationTokenIdentifier dtId1 =
        new RMDelegationTokenIdentifier(new Text("owner1"),
            new Text("renewer1"), new Text("realuser1"));
    // Long.valueOf instead of the deprecated Long constructor.
    Long renewDate1 = Long.valueOf(System.currentTimeMillis());
    dtId1.setSequenceNumber(1111);
    store.storeRMDelegationToken(dtId1, renewDate1);
    assertEquals("RMStateStore should have been in fenced state", true,
        store.isFencedState());

    store.updateRMDelegationToken(dtId1, renewDate1);
    assertEquals("RMStateStore should have been in fenced state", true,
        store.isFencedState());

    // remove delegation key;
    store.removeRMDelegationToken(dtId1);
    assertEquals("RMStateStore should have been in fenced state", true,
        store.isFencedState());

    // store delegation master key;
    DelegationKey key = new DelegationKey(1234, 4321, "keyBytes".getBytes());
    store.storeRMDTMasterKey(key);
    assertEquals("RMStateStore should have been in fenced state", true,
        store.isFencedState());

    // remove delegation master key;
    store.removeRMDTMasterKey(key);
    assertEquals("RMStateStore should have been in fenced state", true,
        store.isFencedState());

    // store or update AMRMToken;
    store.storeOrUpdateAMRMTokenSecretManager(null, false);
    assertEquals("RMStateStore should have been in fenced state", true,
        store.isFencedState());
    store.close();
  }

  @Test
  public void testDuplicateRMAppDeletion() throws Exception {
    TestZKRMStateStoreTester zkTester = new TestZKRMStateStoreTester();
    long submitTime = System.currentTimeMillis();
    long startTime = System.currentTimeMillis() + 1234;
    RMStateStore store = zkTester.getRMStateStore();
    TestDispatcher dispatcher = new TestDispatcher();
    store.setRMDispatcher(dispatcher);

    ApplicationAttemptId attemptIdRemoved = ConverterUtils
        .toApplicationAttemptId("appattempt_1352994193343_0002_000001");
    ApplicationId appIdRemoved = attemptIdRemoved.getApplicationId();
    storeApp(store, appIdRemoved, submitTime, startTime);
    storeAttempt(store, attemptIdRemoved,
        "container_1352994193343_0002_01_000001", null, null, dispatcher);

    ApplicationSubmissionContext context =
        new ApplicationSubmissionContextPBImpl();
    context.setApplicationId(appIdRemoved);

    ApplicationStateData appStateRemoved =
        ApplicationStateData.newInstance(
            submitTime, startTime, context, "user1");
    appStateRemoved.attempts.put(attemptIdRemoved, null);
    store.removeApplicationStateInternal(appStateRemoved);
    try {
      // A second removal of the same app must be a no-op, not an error.
      store.removeApplicationStateInternal(appStateRemoved);
    } catch (KeeperException.NoNodeException nne) {
      Assert.fail("NoNodeException should not happen.");
    }
    store.close();
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.lang3.time;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.io.Serializable;
import java.text.FieldPosition;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.Locale;
import java.util.TimeZone;
import org.apache.commons.lang3.SerializationUtils;
import org.apache.commons.lang3.test.SystemDefaults;
import org.apache.commons.lang3.test.SystemDefaultsSwitch;
import org.junit.Rule;
import org.junit.Test;
/**
* Unit tests {@link org.apache.commons.lang3.time.FastDatePrinter}.
*
* @since 3.0
*/
public class FastDatePrinterTest {
// Sample pattern plus the time zones / locales exercised by the tests.
private static final String YYYY_MM_DD = "yyyy/MM/dd";
private static final TimeZone NEW_YORK = TimeZone.getTimeZone("America/New_York");
private static final TimeZone GMT = TimeZone.getTimeZone("GMT");
private static final TimeZone INDIA = TimeZone.getTimeZone("Asia/Calcutta");
private static final Locale SWEDEN = new Locale("sv", "SE");
/**
 * Creates the printer under test for {@code format}, using the JVM default
 * time zone and locale.
 *
 * @param format the pattern string to use
 * @return the DatePrinter to use for testing
 */
DatePrinter getInstance(final String format) {
    final TimeZone defaultZone = TimeZone.getDefault();
    final Locale defaultLocale = Locale.getDefault();
    return getInstance(format, defaultZone, defaultLocale);
}
/**
 * Creates a date-only printer for the given style and locale, in the default
 * time zone.
 *
 * @param dateStyle one of the {@code DateFormat} style constants
 * @param locale the locale used for both the style pattern and the printer
 * @return the DatePrinter to use for testing
 */
private DatePrinter getDateInstance(final int dateStyle, final Locale locale) {
    // Previously the printer was constructed with Locale.getDefault() even
    // though the style pattern came from 'locale'; use 'locale' consistently.
    return getInstance(FormatCache.getPatternForStyle(Integer.valueOf(dateStyle), null, locale), TimeZone.getDefault(), locale);
}
/**
 * Creates the printer under test for {@code format} and {@code locale},
 * using the JVM default time zone.
 *
 * @param format the pattern string to use
 * @param locale the locale to use
 * @return the DatePrinter to use for testing
 */
private DatePrinter getInstance(final String format, final Locale locale) {
    final TimeZone defaultZone = TimeZone.getDefault();
    return getInstance(format, defaultZone, locale);
}
/**
 * Creates the printer under test for {@code format} and {@code timeZone},
 * using the JVM default locale.
 *
 * @param format the pattern string to use
 * @param timeZone the time zone to use
 * @return the DatePrinter to use for testing
 */
private DatePrinter getInstance(final String format, final TimeZone timeZone) {
    final Locale defaultLocale = Locale.getDefault();
    return getInstance(format, timeZone, defaultLocale);
}
/**
 * Factory hook for derived tests: builds the {@link DatePrinter} instance
 * under test. Override to test a different implementation.
 *
 * @param format the format string to use
 * @param timeZone the time zone to use
 * @param locale the locale to use
 * @return the DatePrinter to use for testing
 */
protected DatePrinter getInstance(final String format, final TimeZone timeZone, final Locale locale) {
    return new FastDatePrinter(format, timeZone, locale);
}
/** JUnit rule that applies the per-test {@code @SystemDefaults} time zone/locale settings. */
@Rule
public SystemDefaultsSwitch defaults = new SystemDefaultsSwitch();
// Defaults pinned so expected strings and offsets (-0500/-0400 incl. DST) are stable.
@SystemDefaults(timezone="America/New_York", locale="en_US")
@Test
public void testFormat() {
    // cal1 is in EST (winter), cal2 in EDT (summer) — exercises DST offsets.
    final GregorianCalendar cal1 = new GregorianCalendar(2003, 0, 10, 15, 33, 20);
    final GregorianCalendar cal2 = new GregorianCalendar(2003, 6, 10, 9, 0, 0);
    final Date date1 = cal1.getTime();
    final Date date2 = cal2.getTime();
    final long millis1 = date1.getTime();
    final long millis2 = date2.getTime();
    DatePrinter fdf = getInstance("yyyy-MM-dd'T'HH:mm:ss");
    SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");
    // Date, Calendar and long inputs must all produce identical output.
    assertEquals(sdf.format(date1), fdf.format(date1));
    assertEquals("2003-01-10T15:33:20", fdf.format(date1));
    assertEquals("2003-01-10T15:33:20", fdf.format(cal1));
    assertEquals("2003-01-10T15:33:20", fdf.format(millis1));
    assertEquals("2003-07-10T09:00:00", fdf.format(date2));
    assertEquals("2003-07-10T09:00:00", fdf.format(cal2));
    assertEquals("2003-07-10T09:00:00", fdf.format(millis2));
    // 'Z' — RFC 822 style offset without colon.
    fdf = getInstance("Z");
    assertEquals("-0500", fdf.format(date1));
    assertEquals("-0500", fdf.format(cal1));
    assertEquals("-0500", fdf.format(millis1));
    assertEquals("-0400", fdf.format(date2));
    assertEquals("-0400", fdf.format(cal2));
    assertEquals("-0400", fdf.format(millis2));
    // 'ZZ' — ISO 8601 style offset with colon.
    fdf = getInstance("ZZ");
    assertEquals("-05:00", fdf.format(date1));
    assertEquals("-05:00", fdf.format(cal1));
    assertEquals("-05:00", fdf.format(millis1));
    assertEquals("-04:00", fdf.format(date2));
    assertEquals("-04:00", fdf.format(cal2));
    assertEquals("-04:00", fdf.format(millis2));
    final String pattern = "GGGG GGG GG G yyyy yyy yy y MMMM MMM MM M" +
        " dddd ddd dd d DDDD DDD DD D EEEE EEE EE E aaaa aaa aa a zzzz zzz zz z";
    fdf = getInstance(pattern);
    sdf = new SimpleDateFormat(pattern);
    // SDF bug fix starting with Java 7: normalize SimpleDateFormat's 'y'
    // output before comparing against FastDatePrinter.
    assertEquals(sdf.format(date1).replaceAll("2003 03 03 03", "2003 2003 03 2003"), fdf.format(date1));
    assertEquals(sdf.format(date2).replaceAll("2003 03 03 03", "2003 2003 03 2003"), fdf.format(date2));
}
/**
* Test case for {@link FastDateParser#FastDateParser(String, TimeZone, Locale)}.
*/
@Test
public void testShortDateStyleWithLocales() {
final Locale usLocale = Locale.US;
final Locale swedishLocale = new Locale("sv", "SE");
final Calendar cal = Calendar.getInstance();
cal.set(2004, Calendar.FEBRUARY, 3);
DatePrinter fdf = getDateInstance(FastDateFormat.SHORT, usLocale);
assertEquals("2/3/04", fdf.format(cal));
fdf = getDateInstance(FastDateFormat.SHORT, swedishLocale);
assertEquals("2004-02-03", fdf.format(cal));
}
/**
* Tests that pre-1000AD years get padded with yyyy
*/
@Test
public void testLowYearPadding() {
final Calendar cal = Calendar.getInstance();
final DatePrinter format = getInstance(YYYY_MM_DD);
cal.set(1, Calendar.JANUARY, 1);
assertEquals("0001/01/01", format.format(cal));
cal.set(10, Calendar.JANUARY, 1);
assertEquals("0010/01/01", format.format(cal));
cal.set(100, Calendar.JANUARY, 1);
assertEquals("0100/01/01", format.format(cal));
cal.set(999, Calendar.JANUARY, 1);
assertEquals("0999/01/01", format.format(cal));
}
/**
* Show Bug #39410 is solved
*/
@Test
public void testMilleniumBug() {
final Calendar cal = Calendar.getInstance();
final DatePrinter format = getInstance("dd.MM.yyyy");
cal.set(1000, Calendar.JANUARY, 1);
assertEquals("01.01.1000", format.format(cal));
}
/**
* testLowYearPadding showed that the date was buggy
* This test confirms it, getting 366 back as a date
*/
@Test
public void testSimpleDate() {
final Calendar cal = Calendar.getInstance();
final DatePrinter format = getInstance(YYYY_MM_DD);
cal.set(2004, Calendar.DECEMBER, 31);
assertEquals("2004/12/31", format.format(cal));
cal.set(999, Calendar.DECEMBER, 31);
assertEquals("0999/12/31", format.format(cal));
cal.set(1, Calendar.MARCH, 2);
assertEquals("0001/03/02", format.format(cal));
}
@Test
public void testLang303() {
final Calendar cal = Calendar.getInstance();
cal.set(2004, Calendar.DECEMBER, 31);
DatePrinter format = getInstance(YYYY_MM_DD);
final String output = format.format(cal);
format = SerializationUtils.deserialize(SerializationUtils.serialize((Serializable) format));
assertEquals(output, format.format(cal));
}
@Test
public void testLang538() {
// more commonly constructed with: cal = new GregorianCalendar(2009, 9, 16, 8, 42, 16)
// for the unit test to work in any time zone, constructing with GMT-8 rather than default locale time zone
final GregorianCalendar cal = new GregorianCalendar(TimeZone.getTimeZone("GMT-8"));
cal.clear();
cal.set(2009, Calendar.OCTOBER, 16, 8, 42, 16);
final DatePrinter format = getInstance("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'", TimeZone.getTimeZone("GMT"));
assertEquals("dateTime", "2009-10-16T16:42:16.000Z", format.format(cal.getTime()));
assertEquals("dateTime", "2009-10-16T16:42:16.000Z", format.format(cal));
}
@Test
public void testLang645() {
final Locale locale = new Locale("sv", "SE");
final Calendar cal = Calendar.getInstance();
cal.set(2010, Calendar.JANUARY, 1, 12, 0, 0);
final Date d = cal.getTime();
final DatePrinter fdf = getInstance("EEEE', week 'ww", locale);
assertEquals("fredag, week 53", fdf.format(d));
}
@Test
public void testEquals() {
final DatePrinter printer1= getInstance(YYYY_MM_DD);
final DatePrinter printer2= getInstance(YYYY_MM_DD);
assertEquals(printer1, printer2);
assertEquals(printer1.hashCode(), printer2.hashCode());
assertFalse(printer1.equals(new Object()));
}
@Test
public void testToStringContainsName() {
final DatePrinter printer= getInstance(YYYY_MM_DD);
assertTrue(printer.toString().startsWith("FastDate"));
}
@Test
public void testPatternMatches() {
final DatePrinter printer= getInstance(YYYY_MM_DD);
assertEquals(YYYY_MM_DD, printer.getPattern());
}
@Test
public void testLocaleMatches() {
final DatePrinter printer= getInstance(YYYY_MM_DD, SWEDEN);
assertEquals(SWEDEN, printer.getLocale());
}
@Test
public void testTimeZoneMatches() {
final DatePrinter printer= getInstance(YYYY_MM_DD, NEW_YORK);
assertEquals(NEW_YORK, printer.getTimeZone());
}
@SystemDefaults(timezone="UTC")
@Test
public void testTimeZoneAsZ() throws Exception {
final Calendar c = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
final FastDateFormat noColonFormat = FastDateFormat.getInstance("Z");
assertEquals("+0000", noColonFormat.format(c));
final FastDateFormat isoFormat = FastDateFormat.getInstance("ZZ");
assertEquals("Z", isoFormat.format(c));
final FastDateFormat colonFormat = FastDateFormat.getInstance("ZZZ");
assertEquals("+00:00", colonFormat.format(c));
}
private static Calendar initializeCalendar(final TimeZone tz) {
final Calendar cal = Calendar.getInstance(tz);
cal.set(Calendar.YEAR, 2001);
cal.set(Calendar.MONTH, 1); // not daylight savings
cal.set(Calendar.DAY_OF_MONTH, 4);
cal.set(Calendar.HOUR_OF_DAY, 12);
cal.set(Calendar.MINUTE, 8);
cal.set(Calendar.SECOND, 56);
cal.set(Calendar.MILLISECOND, 235);
return cal;
}
@Test(expected = IllegalArgumentException.class)
public void test1806Argument() {
getInstance("XXXX");
}
private static enum Expected1806 {
India(INDIA, "+05", "+0530", "+05:30"), Greenwich(GMT, "Z", "Z", "Z"), NewYork(
NEW_YORK, "-05", "-0500", "-05:00");
private Expected1806(final TimeZone zone, final String one, final String two, final String three) {
this.zone = zone;
this.one = one;
this.two = two;
this.three = three;
}
final TimeZone zone;
final String one;
final String two;
final String three;
}
@Test
public void test1806() throws ParseException {
for (final Expected1806 trial : Expected1806.values()) {
final Calendar cal = initializeCalendar(trial.zone);
DatePrinter printer = getInstance("X", trial.zone);
assertEquals(trial.one, printer.format(cal));
printer = getInstance("XX", trial.zone);
assertEquals(trial.two, printer.format(cal));
printer = getInstance("XXX", trial.zone);
assertEquals(trial.three, printer.format(cal));
}
}
@Test
public void testLang1103() throws ParseException {
final Calendar cal = Calendar.getInstance(SWEDEN);
cal.set(Calendar.DAY_OF_MONTH, 2);
assertEquals("2", getInstance("d", SWEDEN).format(cal));
assertEquals("02", getInstance("dd", SWEDEN).format(cal));
assertEquals("002", getInstance("ddd", SWEDEN).format(cal));
assertEquals("0002", getInstance("dddd", SWEDEN).format(cal));
assertEquals("00002", getInstance("ddddd", SWEDEN).format(cal));
}
/**
* According to LANG-916 (https://issues.apache.org/jira/browse/LANG-916),
* the format method did contain a bug: it did not use the TimeZone data.
*
* This method test that the bug is fixed.
*/
@Test
public void testLang916() throws Exception {
final Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("Europe/Paris"));
cal.clear();
cal.set(2009, 9, 16, 8, 42, 16);
// calendar fast.
{
final String value = FastDateFormat.getInstance("yyyy-MM-dd'T'HH:mm:ss Z", TimeZone.getTimeZone("Europe/Paris")).format(cal);
assertEquals("calendar", "2009-10-16T08:42:16 +0200", value);
}
{
final String value = FastDateFormat.getInstance("yyyy-MM-dd'T'HH:mm:ss Z", TimeZone.getTimeZone("Asia/Kolkata")).format(cal);
assertEquals("calendar", "2009-10-16T12:12:16 +0530", value);
}
{
final String value = FastDateFormat.getInstance("yyyy-MM-dd'T'HH:mm:ss Z", TimeZone.getTimeZone("Europe/London")).format(cal);
assertEquals("calendar", "2009-10-16T07:42:16 +0100", value);
}
}
@Test
public void testHourFormats() {
final Calendar calendar = Calendar.getInstance();
calendar.clear();
final DatePrinter printer = getInstance("K k H h");
calendar.set(Calendar.HOUR_OF_DAY, 0);
assertEquals("0 24 0 12", printer.format(calendar));
calendar.set(Calendar.HOUR_OF_DAY, 12);
assertEquals("0 12 12 12", printer.format(calendar));
calendar.set(Calendar.HOUR_OF_DAY, 23);
assertEquals("11 23 23 11", printer.format(calendar));
}
@SuppressWarnings("deprecation")
@Test
public void testStringBufferOptions() {
final DatePrinter format = getInstance("yyyy-MM-dd HH:mm:ss.SSS Z", TimeZone.getTimeZone("GMT"));
final Calendar calendar = Calendar.getInstance();
final StringBuffer sb = new StringBuffer();
final String expected = format.format(calendar, sb, new FieldPosition(0)).toString();
sb.setLength(0);
assertEquals(expected, format.format(calendar, sb).toString());
sb.setLength(0);
final Date date = calendar.getTime();
assertEquals(expected, format.format(date, sb, new FieldPosition(0)).toString());
sb.setLength(0);
assertEquals(expected, format.format(date, sb).toString());
sb.setLength(0);
final long epoch = date.getTime();
assertEquals(expected, format.format(epoch, sb, new FieldPosition(0)).toString());
sb.setLength(0);
assertEquals(expected, format.format(epoch, sb).toString());
}
@Test
public void testAppendableOptions() {
final DatePrinter format = getInstance("yyyy-MM-dd HH:mm:ss.SSS Z", TimeZone.getTimeZone("GMT"));
final Calendar calendar = Calendar.getInstance();
final StringBuilder sb = new StringBuilder();
final String expected = format.format(calendar, sb).toString();
sb.setLength(0);
final Date date = calendar.getTime();
assertEquals(expected, format.format(date, sb).toString());
sb.setLength(0);
final long epoch = date.getTime();
assertEquals(expected, format.format(epoch, sb).toString());
}
@Test
public void testDayNumberOfWeek() {
final DatePrinter printer = getInstance("u");
final Calendar calendar = Calendar.getInstance();
calendar.set(Calendar.DAY_OF_WEEK, Calendar.MONDAY);
assertEquals("1", printer.format(calendar.getTime()));
calendar.set(Calendar.DAY_OF_WEEK, Calendar.SATURDAY);
assertEquals("6", printer.format(calendar.getTime()));
calendar.set(Calendar.DAY_OF_WEEK, Calendar.SUNDAY);
assertEquals("7", printer.format(calendar.getTime()));
}
}
| |
/*
*
* * Copyright 2014 Orient Technologies LTD (info(at)orientechnologies.com)
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
* * For more information: http://www.orientechnologies.com
*
*/
package com.orientechnologies.orient.core.type.tree;
import java.io.IOException;
import com.orientechnologies.orient.core.index.mvrbtree.OMVRBTreeEntry;
import com.orientechnologies.common.log.OLogManager;
import com.orientechnologies.orient.core.exception.OSerializationException;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.id.ORecordId;
import com.orientechnologies.orient.core.type.tree.provider.OIdentityChangedListener;
import com.orientechnologies.orient.core.type.tree.provider.OMVRBTreeEntryDataProvider;
/**
*
* Serialized as:
* <table>
* <tr>
* <td>FROM</td>
* <td>TO</td>
* <td>FIELD</td>
* </tr>
* <tr>
* <td>00</td>
* <td>04</td>
* <td>PAGE SIZE</td>
* </tr>
* <tr>
* <td>04</td>
* <td>14</td>
* <td>PARENT RID</td>
* </tr>
* <tr>
* <td>14</td>
* <td>24</td>
* <td>LEFT RID</td>
* </tr>
* <tr>
* <td>24</td>
* <td>34</td>
* <td>RIGHT RID</td>
* </tr>
* <tr>
* <td>34</td>
* <td>35</td>
* <td>COLOR</td>
* </tr>
* <tr>
* <td>35</td>
* <td>37</td>
* <td>SIZE</td>
* </tr>
* </table>
* VARIABLE
*
* @author Luca Garulli (l.garulli--at--orientechnologies.com)
*
* @param <K>
* @param <V>
*/
public class OMVRBTreeEntryPersistent<K, V> extends OMVRBTreeEntry<K, V> implements OIdentityChangedListener {
  /** Backing provider that owns the serialized form (keys, values, rids, color, size). */
  protected OMVRBTreeEntryDataProvider<K, V> dataProvider;
  /** Owning persistent tree; also acts as node cache and dirty-node dispatcher. */
  protected OMVRBTreePersistent<K, V>        pTree;

  // In-memory links; any of these may be null while the corresponding rid in
  // dataProvider is still valid (nodes are lazy-loaded on access).
  protected OMVRBTreeEntryPersistent<K, V>   parent;
  protected OMVRBTreeEntryPersistent<K, V>   left;
  protected OMVRBTreeEntryPersistent<K, V>   right;

  /**
   * Called upon unmarshalling.
   *
   * @param iTree
   *          Tree which belong
   * @param iParent
   *          Parent node if any
   * @param iRecordId
   *          Record to unmarshall
   */
  public OMVRBTreeEntryPersistent(final OMVRBTreePersistent<K, V> iTree, final OMVRBTreeEntryPersistent<K, V> iParent,
      final ORID iRecordId) {
    super(iTree);
    pTree = iTree;
    dataProvider = pTree.dataProvider.getEntry(iRecordId);
    dataProvider.setIdentityChangedListener(this);

    init();
    parent = iParent;
    // setParent(iParent);
    pTree.addNodeInMemory(this);
  }

  /**
   * Make a new cell with given key, value, and parent, and with <tt>null</tt> child links, and BLACK color.
   */
  public OMVRBTreeEntryPersistent(final OMVRBTreePersistent<K, V> iTree, final K iKey, final V iValue,
      final OMVRBTreeEntryPersistent<K, V> iParent) {
    super(iTree);
    pTree = iTree;
    dataProvider = pTree.dataProvider.createEntry();
    dataProvider.setIdentityChangedListener(this);
    dataProvider.insertAt(0, iKey, iValue);

    init();
    setParent(iParent);
    pTree.addNodeInMemory(this);
    // created entry : force dispatch dirty node.
    markDirty();
  }

  /**
   * Called on event of splitting an entry. Copy values from the parent node.
   *
   * @param iParent
   *          Parent node
   * @param iPosition
   *          Current position
   */
  public OMVRBTreeEntryPersistent(final OMVRBTreeEntry<K, V> iParent, final int iPosition) {
    super(((OMVRBTreeEntryPersistent<K, V>) iParent).getTree());
    pTree = (OMVRBTreePersistent<K, V>) tree;
    OMVRBTreeEntryPersistent<K, V> pParent = (OMVRBTreeEntryPersistent<K, V>) iParent;

    dataProvider = pTree.dataProvider.createEntry();
    dataProvider.setIdentityChangedListener(this);
    dataProvider.copyDataFrom(pParent.dataProvider, iPosition);
    if (pParent.dataProvider.truncate(iPosition))
      pParent.markDirty();
    init();
    setParent(pParent);
    pTree.addNodeInMemory(this);
    // created entry : force dispatch dirty node.
    markDirty();
  }

  /** @return the underlying data provider of this node. */
  public OMVRBTreeEntryDataProvider<K, V> getProvider() {
    return dataProvider;
  }

  /**
   * Assures that all the links versus parent, left and right are consistent.
   *
   */
  public OMVRBTreeEntryPersistent<K, V> save() throws OSerializationException {
    if (!dataProvider.isEntryDirty())
      return this;

    final boolean isNew = dataProvider.getIdentity().isNew();

    // FOR EACH NEW LINK, SAVE BEFORE
    if (left != null && left.dataProvider.getIdentity().isNew()) {
      if (isNew) {
        // TEMPORARY INCORRECT SAVE FOR GETTING AN ID. WILL BE SET DIRTY AGAIN JUST AFTER
        left.dataProvider.save();
      } else
        left.save();
    }
    if (right != null && right.dataProvider.getIdentity().isNew()) {
      if (isNew) {
        // TEMPORARY INCORRECT SAVE FOR GETTING AN ID. WILL BE SET DIRTY AGAIN JUST AFTER
        right.dataProvider.save();
      } else
        right.save();
    }
    if (parent != null && parent.dataProvider.getIdentity().isNew()) {
      if (isNew) {
        // TEMPORARY INCORRECT SAVE FOR GETTING AN ID. WILL BE SET DIRTY AGAIN JUST AFTER
        parent.dataProvider.save();
      } else
        parent.save();
    }

    dataProvider.save();

    // if (parent != null)
    // if (!parent.record.getIdentity().equals(parentRid))
    // OLogManager.instance().error(this,
    // "[save]: Tree node %s has parentRid '%s' different by the rid of the assigned parent node: %s", record.getIdentity(),
    // parentRid, parent.record.getIdentity());

    checkEntryStructure();

    if (pTree.searchNodeInCache(dataProvider.getIdentity()) != this) {
      // UPDATE THE CACHE
      pTree.addNodeInMemory(this);
    }

    return this;
  }

  /**
   * Delete all the nodes recursively. IF they are not loaded in memory, load all the tree.
   *
   * @throws IOException
   */
  public OMVRBTreeEntryPersistent<K, V> delete() throws IOException {
    if (dataProvider != null) {
      pTree.removeNodeFromMemory(this);
      pTree.removeEntry(dataProvider.getIdentity());

      // EARLY LOAD LEFT AND DELETE IT RECURSIVELY
      if (getLeft() != null)
        ((OMVRBTreeEntryPersistent<K, V>) getLeft()).delete();

      // EARLY LOAD RIGHT AND DELETE IT RECURSIVELY
      if (getRight() != null)
        ((OMVRBTreeEntryPersistent<K, V>) getRight()).delete();

      // DELETE MYSELF
      dataProvider.removeIdentityChangedListener(this);
      dataProvider.delete();

      clear();
    }
    return this;
  }

  /**
   * Disconnect the current node from others.
   *
   * @param iForceDirty
   *          Force disconnection also if the record it's dirty
   * @param iLevel
   * @return count of nodes that has been disconnected
   */
  protected int disconnect(final boolean iForceDirty, final int iLevel) {
    if (dataProvider == null)
      // DIRTY NODE, JUST REMOVE IT
      return 1;

    int totalDisconnected = 0;

    final ORID rid = dataProvider.getIdentity();

    boolean disconnectedFromParent = false;
    if (parent != null) {
      // DISCONNECT RECURSIVELY THE PARENT NODE
      if (canDisconnectFrom(parent) || iForceDirty) {
        if (parent.left == this) {
          parent.left = null;
        } else if (parent.right == this) {
          parent.right = null;
        } else
          OLogManager.instance().warn(this,
              "Node " + rid + " has the parent (" + parent + ") unlinked to itself. It links to " + parent);

        totalDisconnected += parent.disconnect(iForceDirty, iLevel + 1);
        parent = null;
        disconnectedFromParent = true;
      }
    } else {
      disconnectedFromParent = true;
    }

    boolean disconnectedFromLeft = false;
    if (left != null) {
      // DISCONNECT RECURSIVELY THE LEFT NODE
      if (canDisconnectFrom(left) || iForceDirty) {
        if (left.parent == this)
          left.parent = null;
        else
          OLogManager.instance().warn(this,
              "Node " + rid + " has the left (" + left + ") unlinked to itself. It links to " + left.parent);

        totalDisconnected += left.disconnect(iForceDirty, iLevel + 1);
        left = null;
        disconnectedFromLeft = true;
      }
    } else {
      disconnectedFromLeft = true;
    }

    boolean disconnectedFromRight = false;
    if (right != null) {
      // DISCONNECT RECURSIVELY THE RIGHT NODE
      if (canDisconnectFrom(right) || iForceDirty) {
        if (right.parent == this)
          right.parent = null;
        else
          OLogManager.instance().warn(this,
              "Node " + rid + " has the right (" + right + ") unlinked to itself. It links to " + right.parent);

        totalDisconnected += right.disconnect(iForceDirty, iLevel + 1);
        right = null;
        disconnectedFromRight = true;
      }
    } else {
      // FIX: this branch previously set disconnectedFromLeft instead of
      // disconnectedFromRight (copy-paste error), so a node with no right child
      // could never reach the memory-release branch below.
      disconnectedFromRight = true;
    }

    if (disconnectedFromParent && disconnectedFromLeft && disconnectedFromRight)
      if ((!dataProvider.isEntryDirty() && !dataProvider.getIdentity().isTemporary() || iForceDirty)
          && !pTree.isNodeEntryPoint(this)) {
        totalDisconnected++;
        pTree.removeNodeFromMemory(this);
        clear();
      }

    return totalDisconnected;
  }

  /** A link may be dropped only when neither side still has a new (unsaved) identity. */
  private boolean canDisconnectFrom(OMVRBTreeEntryPersistent<K, V> entry) {
    return dataProvider == null || !dataProvider.getIdentity().isNew() && !entry.dataProvider.getIdentity().isNew();
  }

  /** Releases the provider and breaks the back-references so memory can be reclaimed. */
  protected void clear() {
    // SPEED UP MEMORY CLAIM BY RESETTING INTERNAL FIELDS
    pTree = null;
    tree = null;
    dataProvider.removeIdentityChangedListener(this);
    dataProvider.clear();
    dataProvider = null;
  }

  /**
   * Clear links and current node only if it's not an entry point.
   *
   * @param iForce
   *          Force disconnection also if the record is dirty
   * @return count of nodes that has been disconnected
   */
  protected int disconnectLinked(final boolean iForce) {
    return disconnect(iForce, 0);
  }

  /** @return the depth of this node counting only parents already loaded in memory. */
  public int getDepthInMemory() {
    int level = 0;
    OMVRBTreeEntryPersistent<K, V> entry = this;
    while (entry.parent != null) {
      level++;
      entry = entry.parent;
    }
    return level;
  }

  /** @return the depth of this node, lazy-loading parents as needed. */
  @Override
  public int getDepth() {
    int level = 0;
    OMVRBTreeEntryPersistent<K, V> entry = this;
    while (entry.getParent() != null) {
      level++;
      entry = (OMVRBTreeEntryPersistent<K, V>) entry.getParent();
    }
    return level;
  }

  @Override
  public OMVRBTreeEntry<K, V> getParent() {
    if (dataProvider == null)
      return null;

    if (parent == null && dataProvider.getParent().isValid()) {
      // System.out.println("Node " + record.getIdentity() + " is loading PARENT node " + parentRid + "...");

      // LAZY LOADING OF THE PARENT NODE
      parent = pTree.loadEntry(null, dataProvider.getParent());

      checkEntryStructure();

      if (parent != null) {
        // TRY TO ASSIGN IT FOLLOWING THE RID
        if (parent.dataProvider.getLeft().isValid() && parent.dataProvider.getLeft().equals(dataProvider.getIdentity()))
          parent.left = this;
        else if (parent.dataProvider.getRight().isValid() && parent.dataProvider.getRight().equals(dataProvider.getIdentity()))
          parent.right = this;
        else {
          OLogManager.instance().error(this, "getParent: Cannot assign node %s to parent. Nodes parent-left=%s, parent-right=%s",
              dataProvider.getParent(), parent.dataProvider.getLeft(), parent.dataProvider.getRight());
        }
      }
    }
    return parent;
  }

  @Override
  public OMVRBTreeEntry<K, V> setParent(final OMVRBTreeEntry<K, V> iParent) {
    if (iParent != parent) {
      OMVRBTreeEntryPersistent<K, V> newParent = (OMVRBTreeEntryPersistent<K, V>) iParent;
      ORID newParentId = iParent == null ? ORecordId.EMPTY_RECORD_ID : newParent.dataProvider.getIdentity();

      parent = newParent;

      if (dataProvider.setParent(newParentId))
        markDirty();

      if (parent != null) {
        ORID thisRid = dataProvider.getIdentity();

        // KEEP THE PARENT'S CHILD RIDS AND IN-MEMORY LINKS IN SYNC WITH THIS NODE
        if (parent.left == this && !parent.dataProvider.getLeft().equals(thisRid))
          if (parent.dataProvider.setLeft(thisRid))
            parent.markDirty();
        if (parent.left != this && parent.dataProvider.getLeft().isValid() && parent.dataProvider.getLeft().equals(thisRid))
          parent.left = this;
        if (parent.right == this && !parent.dataProvider.getRight().equals(thisRid))
          if (parent.dataProvider.setRight(thisRid))
            parent.markDirty();
        if (parent.right != this && parent.dataProvider.getRight().isValid() && parent.dataProvider.getRight().equals(thisRid))
          parent.right = this;
      }
    }
    return iParent;
  }

  @Override
  public OMVRBTreeEntry<K, V> getLeft() {
    if (dataProvider == null)
      return null;
    if (left == null && dataProvider.getLeft().isValid()) {
      // LAZY LOADING OF THE LEFT LEAF
      left = pTree.loadEntry(this, dataProvider.getLeft());
      checkEntryStructure();
    }
    return left;
  }

  @Override
  public void setLeft(final OMVRBTreeEntry<K, V> iLeft) {
    if (iLeft != left) {
      OMVRBTreeEntryPersistent<K, V> newLeft = (OMVRBTreeEntryPersistent<K, V>) iLeft;
      ORID newLeftId = iLeft == null ? ORecordId.EMPTY_RECORD_ID : newLeft.dataProvider.getIdentity();

      left = newLeft;

      if (dataProvider.setLeft(newLeftId))
        markDirty();

      if (left != null && left.parent != this)
        left.setParent(this);

      checkEntryStructure();
    }
  }

  @Override
  public OMVRBTreeEntry<K, V> getRight() {
    if (dataProvider == null)
      return null;
    if (right == null && dataProvider.getRight().isValid()) {
      // LAZY LOADING OF THE RIGHT LEAF
      right = pTree.loadEntry(this, dataProvider.getRight());
      checkEntryStructure();
    }
    return right;
  }

  @Override
  public void setRight(final OMVRBTreeEntry<K, V> iRight) {
    if (iRight != right) {
      OMVRBTreeEntryPersistent<K, V> newRight = (OMVRBTreeEntryPersistent<K, V>) iRight;
      ORID newRightId = iRight == null ? ORecordId.EMPTY_RECORD_ID : newRight.dataProvider.getIdentity();

      right = newRight;

      if (dataProvider.setRight(newRightId))
        markDirty();

      if (right != null && right.parent != this)
        right.setParent(this);

      checkEntryStructure();
    }
  }

  /** Sanity check of rid/link consistency; only logs errors, active only when runtime checks are enabled. */
  public void checkEntryStructure() {
    if (!tree.isRuntimeCheckEnabled())
      return;

    if (dataProvider.getParent() == null)
      OLogManager.instance().error(this, "checkEntryStructure: Node %s has parentRid null!\n", this);
    if (dataProvider.getLeft() == null)
      OLogManager.instance().error(this, "checkEntryStructure: Node %s has leftRid null!\n", this);
    if (dataProvider.getRight() == null)
      OLogManager.instance().error(this, "checkEntryStructure: Node %s has rightRid null!\n", this);

    if (this == left || dataProvider.getIdentity().isValid() && dataProvider.getIdentity().equals(dataProvider.getLeft()))
      OLogManager.instance().error(this, "checkEntryStructure: Node %s has left that points to itself!\n", this);
    if (this == right || dataProvider.getIdentity().isValid() && dataProvider.getIdentity().equals(dataProvider.getRight()))
      OLogManager.instance().error(this, "checkEntryStructure: Node %s has right that points to itself!\n", this);
    if (left != null && left == right)
      OLogManager.instance().error(this, "checkEntryStructure: Node %s has left and right equals!\n", this);

    if (left != null) {
      if (!left.dataProvider.getIdentity().equals(dataProvider.getLeft()))
        OLogManager.instance().error(this, "checkEntryStructure: Wrong left node loaded: " + dataProvider.getLeft());
      if (left.parent != this)
        OLogManager.instance().error(this,
            "checkEntryStructure: Left node is not correctly connected to the parent" + dataProvider.getLeft());
    }

    if (right != null) {
      if (!right.dataProvider.getIdentity().equals(dataProvider.getRight()))
        OLogManager.instance().error(this, "checkEntryStructure: Wrong right node loaded: " + dataProvider.getRight());
      if (right.parent != this)
        OLogManager.instance().error(this,
            "checkEntryStructure: Right node is not correctly connected to the parent" + dataProvider.getRight());
    }
  }

  @Override
  protected void copyFrom(final OMVRBTreeEntry<K, V> iSource) {
    if (dataProvider.copyFrom(((OMVRBTreeEntryPersistent<K, V>) iSource).dataProvider))
      markDirty();
  }

  @Override
  protected void insert(final int iIndex, final K iKey, final V iValue) {
    // Inserting at position 0 changes this node's first key, which is used as
    // the tree entry-point key; keep the entry-point index up to date.
    K oldKey = iIndex == 0 ? dataProvider.getKeyAt(0) : null;
    if (dataProvider.insertAt(iIndex, iKey, iValue))
      markDirty();

    if (iIndex == 0)
      pTree.updateEntryPoint(oldKey, this);
  }

  @Override
  protected void remove() {
    final int index = tree.getPageIndex();
    final K oldKey = index == 0 ? getKeyAt(0) : null;

    if (dataProvider.removeAt(index))
      markDirty();

    tree.setPageIndex(index - 1);

    if (index == 0)
      pTree.updateEntryPoint(oldKey, this);
  }

  @Override
  public K getKeyAt(final int iIndex) {
    return dataProvider.getKeyAt(iIndex);
  }

  @Override
  protected V getValueAt(final int iIndex) {
    return dataProvider.getValueAt(iIndex);
  }

  /**
   * Invalidate serialized Value associated in order to be re-marshalled on the next node storing.
   *
   * @return the previous value at the current page index
   */
  public V setValue(final V iValue) {
    V oldValue = getValue();

    int index = tree.getPageIndex();
    if (dataProvider.setValueAt(index, iValue))
      markDirty();

    return oldValue;
  }

  /** @return the number of entries held by this node, or 0 when already cleared. */
  public int getSize() {
    return dataProvider != null ? dataProvider.getSize() : 0;
  }

  /** @return the configured page size of this node. */
  public int getPageSize() {
    return dataProvider.getPageSize();
  }

  /** @return the maximum depth of the subtree considering only nodes loaded in memory. */
  public int getMaxDepthInMemory() {
    return getMaxDepthInMemory(0);
  }

  private int getMaxDepthInMemory(final int iCurrDepthLevel) {
    int depth;

    if (left != null)
      // GET THE LEFT'S DEPTH LEVEL AS GOOD
      depth = left.getMaxDepthInMemory(iCurrDepthLevel + 1);
    else
      // GET THE CURRENT DEPTH LEVEL AS GOOD
      depth = iCurrDepthLevel;

    if (right != null) {
      int rightDepth = right.getMaxDepthInMemory(iCurrDepthLevel + 1);
      if (rightDepth > depth)
        depth = rightDepth;
    }

    return depth;
  }

  /**
   * Returns the successor of the current Entry only by traversing the memory, or null if no such.
   */
  @Override
  public OMVRBTreeEntryPersistent<K, V> getNextInMemory() {
    OMVRBTreeEntryPersistent<K, V> t = this;
    OMVRBTreeEntryPersistent<K, V> p = null;

    if (t.right != null) {
      // SUCCESSOR IS THE LEFT-MOST NODE OF THE RIGHT SUBTREE
      p = t.right;
      while (p.left != null)
        p = p.left;
    } else {
      // OTHERWISE CLIMB UNTIL WE COME FROM A LEFT CHILD
      p = t.parent;
      while (p != null && t == p.right) {
        t = p;
        p = p.parent;
      }
    }

    return p;
  }

  @Override
  public boolean getColor() {
    return dataProvider.getColor();
  }

  @Override
  protected void setColor(final boolean iColor) {
    if (dataProvider.setColor(iColor))
      markDirty();
  }

  /** Notifies the owning tree that this node changed and must be persisted. */
  public void markDirty() {
    pTree.signalNodeChanged(this);
  }

  @Override
  protected OMVRBTreeEntry<K, V> getLeftInMemory() {
    return left;
  }

  @Override
  protected OMVRBTreeEntry<K, V> getParentInMemory() {
    return parent;
  }

  @Override
  protected OMVRBTreeEntry<K, V> getRightInMemory() {
    return right;
  }

  /**
   * Propagates a new record identity (assigned on first save) to every node that
   * references this one by rid: children's parent rid, the parent's child rid, or
   * the tree root rid when this node is the root.
   */
  public void onIdentityChanged(ORID rid) {
    if (left != null) {
      if (left.dataProvider.setParent(rid))
        left.markDirty();
    }

    if (right != null) {
      if (right.dataProvider.setParent(rid))
        right.markDirty();
    }

    if (parent != null) {
      if (parent.left == this) {
        if (parent.dataProvider.setLeft(rid))
          parent.markDirty();
      } else if (parent.right == this) {
        if (parent.dataProvider.setRight(rid))
          parent.markDirty();
      } else {
        OLogManager.instance().error(this, "[save]: Tree inconsistent entries.");
      }
    } else if (pTree.getRoot() == this) {
      if (pTree.dataProvider.setRoot(rid))
        pTree.markDirty();
    }
  }
}
| |
package jp.sourceforge.ea2ddl.dao.bsbhv;
import java.util.List;
import org.seasar.dbflute.*;
import org.seasar.dbflute.cbean.ConditionBean;
import org.seasar.dbflute.cbean.EntityRowHandler;
import org.seasar.dbflute.cbean.ListResultBean;
import org.seasar.dbflute.cbean.PagingBean;
import org.seasar.dbflute.cbean.PagingHandler;
import org.seasar.dbflute.cbean.PagingInvoker;
import org.seasar.dbflute.cbean.PagingResultBean;
import org.seasar.dbflute.cbean.ResultBeanBuilder;
import org.seasar.dbflute.dbmeta.DBMeta;
import org.seasar.dbflute.jdbc.StatementConfig;
import jp.sourceforge.ea2ddl.dao.allcommon.*;
import jp.sourceforge.ea2ddl.dao.exentity.*;
import jp.sourceforge.ea2ddl.dao.bsentity.dbmeta.*;
import jp.sourceforge.ea2ddl.dao.cbean.*;
/**
* The behavior of t_xref that the type is TABLE. <br />
* <pre>
* [primary-key]
*
*
* [column]
* XrefID, Name, Type, Visibility, Namespace, Requirement, Constraint, Behavior, Partition, Description, Client, Supplier, Link
*
* [sequence]
*
*
* [identity]
*
*
* [version-no]
*
*
* [foreign-table]
*
*
* [referrer-table]
*
*
* [foreign-property]
*
*
* [referrer-property]
*
* </pre>
* @author DBFlute(AutoGenerator)
*/
public abstract class BsTXrefBhv extends org.seasar.dbflute.bhv.AbstractBehaviorReadable {
// ===================================================================================
// Definition
// ==========
/*df:BehaviorQueryPathBegin*/
/*df:BehaviorQueryPathEnd*/
// ===================================================================================
// Table name
// ==========
/** @return The table name on the database. (NotNull) */
public String getTableDbName() {
    return "t_xref";
}
// ===================================================================================
// DBMeta
// ======
/** @return The singleton DBMeta for this table. (NotNull) */
public DBMeta getDBMeta() {
    return TXrefDbm.getInstance();
}
/** @return The singleton DBMeta typed to this table. (NotNull) */
public TXrefDbm getMyDBMeta() {
    return TXrefDbm.getInstance();
}
// ===================================================================================
// New Instance
// ============
/** @return A fresh entity instance, typed as the generic Entity interface. (NotNull) */
public Entity newEntity() {
    return newMyEntity();
}
/** @return A fresh condition-bean, typed as the generic ConditionBean interface. (NotNull) */
public ConditionBean newConditionBean() {
    return newMyConditionBean();
}
/** @return A fresh TXref entity. (NotNull) */
public TXref newMyEntity() {
    return new TXref();
}
/** @return A fresh condition-bean for TXref. (NotNull) */
public TXrefCB newMyConditionBean() {
    return new TXrefCB();
}
// ===================================================================================
// Current DBDef
// =============
@Override
protected DBDef getCurrentDBDef() {
    // Delegate to the project-wide current-DB holder.
    return DBCurrent.getInstance().currentDBDef();
}
// ===================================================================================
// Default StatementConfig
// =======================
@Override
protected StatementConfig getDefaultStatementConfig() {
    // Delegate to the project-wide DBFlute configuration.
    return DBFluteConfig.getInstance().getDefaultStatementConfig();
}
// ===================================================================================
// Count Select
// ============
/**
* Select the count by the condition-bean. {IgnorePagingCondition}
* @param cb The condition-bean of TXref. (NotNull)
* @return The selected count.
*/
public int selectCount(TXrefCB cb) {
assertCBNotNull(cb);
return delegateSelectCount(cb);
}
// ===================================================================================
// Cursor Select
// =============
/**
* Select the cursor by the condition-bean. <br />
* Attention: It has a mapping cost from result set to entity.
* @param cb The condition-bean of TXref. (NotNull)
* @param entityRowHandler The handler of entity row of TXref. (NotNull)
*/
public void selectCursor(TXrefCB cb, EntityRowHandler<TXref> entityRowHandler) {
assertCBNotNull(cb); assertObjectNotNull("entityRowHandler<TXref>", entityRowHandler);
delegateSelectCursor(cb, entityRowHandler);
}
// ===================================================================================
// Entity Select
// =============
/**
* Select the entity by the condition-bean.
* @param cb The condition-bean of TXref. (NotNull)
* @return The selected entity. (Nullalble)
* @exception org.seasar.dbflute.exception.EntityDuplicatedException When the entity has been duplicated.
*/
public TXref selectEntity(final TXrefCB cb) {
return helpSelectEntityInternally(cb, new InternalSelectEntityCallback<TXref, TXrefCB>() {
public List<TXref> callbackSelectList(TXrefCB cb) { return selectList(cb); } });
}
/**
* Select the entity by the condition-bean with deleted check.
* @param cb The condition-bean of TXref. (NotNull)
* @return The selected entity. (NotNull)
* @exception org.seasar.dbflute.exception.EntityAlreadyDeletedException When the entity has already been deleted.
* @exception org.seasar.dbflute.exception.EntityDuplicatedException When the entity has been duplicated.
*/
public TXref selectEntityWithDeletedCheck(final TXrefCB cb) {
return helpSelectEntityWithDeletedCheckInternally(cb, new InternalSelectEntityWithDeletedCheckCallback<TXref, TXrefCB>() {
public List<TXref> callbackSelectList(TXrefCB cb) { return selectList(cb); } });
}
// ===================================================================================
// List Select
// ===========
/**
* Select the list as result bean.
* @param cb The condition-bean of TXref. (NotNull)
* @return The result bean of selected list. (NotNull)
*/
public ListResultBean<TXref> selectList(TXrefCB cb) {
assertCBNotNull(cb);
return new ResultBeanBuilder<TXref>(getTableDbName()).buildListResultBean(cb, delegateSelectList(cb));
}
// ===================================================================================
// Page Select
// ===========
/**
* Select the page as result bean.
* @param cb The condition-bean of TXref. (NotNull)
* @return The result bean of selected page. (NotNull)
*/
public PagingResultBean<TXref> selectPage(final TXrefCB cb) {
assertCBNotNull(cb);
final PagingInvoker<TXref> invoker = new PagingInvoker<TXref>(getTableDbName());
final PagingHandler<TXref> handler = new PagingHandler<TXref>() {
public PagingBean getPagingBean() { return cb; }
public int count() { return selectCount(cb); }
public List<TXref> paging() { return selectList(cb); }
};
return invoker.invokePaging(handler);
}
// ===================================================================================
// Scalar Select
// =============
/**
* Select the scalar value derived by a function. <br />
* Call a function method after this method called like as follows:
* <pre>
* tXrefBhv.scalarSelect(Date.class).max(new ScalarQuery(TXrefCB cb) {
* cb.specify().columnXxxDatetime(); // the required specification of target column
* cb.query().setXxxName_PrefixSearch("S"); // query as you like it
* });
* </pre>
* @param <RESULT> The type of result.
* @param resultType The type of result. (NotNull)
* @return The scalar value derived by a function. (Nullable)
*/
public <RESULT> SLFunction<TXrefCB, RESULT> scalarSelect(Class<RESULT> resultType) {
TXrefCB cb = newMyConditionBean();
cb.xsetupForScalarSelect();
cb.getSqlClause().disableSelectIndex(); // for when you use union
return new SLFunction<TXrefCB, RESULT>(cb, resultType);
}
// ===================================================================================
// Pull out Foreign
// ================
// ===================================================================================
// Delegate Method
// ===============
// [Behavior Command]
// -----------------------------------------------------
// Select
// ------
protected int delegateSelectCount(TXrefCB cb) { return invoke(createSelectCountCBCommand(cb)); }
protected void delegateSelectCursor(TXrefCB cb, EntityRowHandler<TXref> entityRowHandler)
{ invoke(createSelectCursorCBCommand(cb, entityRowHandler, TXref.class)); }
protected int doCallReadCount(ConditionBean cb) { return delegateSelectCount((TXrefCB)cb); }
protected List<TXref> delegateSelectList(TXrefCB cb)
{ return invoke(createSelectListCBCommand(cb, TXref.class)); }
@SuppressWarnings("unchecked")
protected List<Entity> doCallReadList(ConditionBean cb) { return (List)delegateSelectList((TXrefCB)cb); }
// ===================================================================================
// Optimistic Lock Info
// ====================
@Override
protected boolean hasVersionNoValue(Entity entity) {
return false;
}
@Override
protected boolean hasUpdateDateValue(Entity entity) {
return false;
}
// ===================================================================================
// Helper
// ======
protected TXref downcast(Entity entity) {
return helpDowncastInternally(entity, TXref.class);
}
}
| |
//
// ========================================================================
// Copyright (c) 1995-2015 Mort Bay Consulting Pty. Ltd.
// ------------------------------------------------------------------------
// All rights reserved. This program and the accompanying materials
// are made available under the terms of the Eclipse Public License v1.0
// and Apache License v2.0 which accompanies this distribution.
//
// The Eclipse Public License is available at
// http://www.eclipse.org/legal/epl-v10.html
//
// The Apache License v2.0 is available at
// http://www.opensource.org/licenses/apache2.0.php
//
// You may elect to redistribute this code under either of these licenses.
// ========================================================================
//
package org.eclipse.jetty.server;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.net.Socket;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.CyclicBarrier;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.eclipse.jetty.io.Connection;
import org.eclipse.jetty.server.handler.AbstractHandler;
import org.eclipse.jetty.server.handler.HandlerWrapper;
import org.eclipse.jetty.server.nio.SelectChannelConnector;
import org.eclipse.jetty.util.log.Log;
import org.eclipse.jetty.util.log.Logger;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
/**
 * Exercises the connection/request statistics kept by {@code AbstractConnector}
 * (connections, open/open-max counts, duration aggregates, requests-per-connection)
 * against a real {@code SelectChannelConnector} using raw client sockets.
 */
public class AbstractConnectorTest
{
    private static final Logger LOG = Log.getLogger(AbstractConnectorTest.class);
    private static Server _server;
    private static AbstractConnector _connector;
    // Rendezvous (2 parties) between the server-side handler and the test thread,
    // so assertions on "connections open" run while a request is in flight.
    private static CyclicBarrier _connect;
    // Counted down by the connector when a connection closes; doClose() awaits it
    // so the statistics are final before the tests assert on them.
    private static CountDownLatch _closed;
    // Per-connection client plumbing, indexed by (connection id - 1).
    private Socket[] _socket;
    private PrintWriter[] _out;
    private BufferedReader[] _in;
    @BeforeClass
    public static void init() throws Exception
    {
        _connect = new CyclicBarrier(2);
        _server = new Server();
        _connector = new SelectChannelConnector()
        {
            public void connectionClosed(Connection connection)
            {
                super.connectionClosed(connection);
                // Signal the test that the connector has accounted for the close.
                _closed.countDown();
            }
        };
        _connector.setStatsOn(true);
        _server.addConnector(_connector);
        HandlerWrapper wrapper = new HandlerWrapper()
        {
            public void handle(String path, Request request, HttpServletRequest httpRequest, HttpServletResponse httpResponse) throws IOException, ServletException
            {
                try
                {
                    // Meet the test thread (sendRequest) before handling, so the
                    // test can observe the connection while it is still open.
                    _connect.await();
                }
                catch (Exception ex)
                {
                    LOG.debug(ex);
                }
                finally
                {
                    super.handle(path, request, httpRequest, httpResponse);
                }
            }
        };
        _server.setHandler(wrapper);
        Handler handler = new AbstractHandler()
        {
            public void handle(String target, Request baseRequest, HttpServletRequest request, HttpServletResponse response)
                throws IOException, ServletException
            {
                // Brief pause so the connection-duration stats asserted below are > 0.
                try{Thread.sleep(1);} catch(Exception e){}
                baseRequest.setHandled(true);
                PrintWriter out = response.getWriter();
                out.write("Server response\n");
                out.close();
                // NOTE(review): status is set after the writer is closed, so the
                // response has likely been committed already — confirm whether
                // this line has any effect.
                response.setStatus(HttpServletResponse.SC_OK);
            }
        };
        wrapper.setHandler(handler);
        _server.start();
    }
    @AfterClass
    public static void destroy() throws Exception
    {
        _server.stop();
        _server.join();
    }
    @Before
    public void reset()
    {
        // Each test starts from zeroed connector statistics.
        _connector.statsReset();
    }
    @Test
    public void testSingleRequest() throws Exception
    {
        // One connection carrying one request.
        doInit(1);
        sendRequest(1, 1);
        doClose(1);
        assertEquals(1, _connector.getConnections());
        assertEquals(0, _connector.getConnectionsOpen());
        assertEquals(1, _connector.getConnectionsOpenMax());
        assertTrue(_connector.getConnectionsOpen() <= _connector.getConnectionsOpenMax());
        assertTrue(_connector.getConnectionsDurationMean() > 0);
        assertTrue(_connector.getConnectionsDurationMax() > 0);
        assertTrue(_connector.getConnectionsDurationMean() <= _connector.getConnectionsDurationMax());
        assertEquals(1, _connector.getRequests());
        assertEquals(1.0, _connector.getConnectionsRequestsMean(), 0.01);
        assertEquals(1, _connector.getConnectionsRequestsMax());
        assertTrue(_connector.getConnectionsRequestsMean() <= _connector.getConnectionsRequestsMax());
    }
    @Test
    public void testMultipleRequests() throws Exception
    {
        // One connection carrying two requests (keep-alive).
        doInit(1);
        sendRequest(1, 1);
        sendRequest(1, 1);
        doClose(1);
        assertEquals(1, _connector.getConnections());
        assertEquals(0, _connector.getConnectionsOpen());
        assertEquals(1, _connector.getConnectionsOpenMax());
        assertTrue(_connector.getConnectionsOpen() <= _connector.getConnectionsOpenMax());
        assertTrue(_connector.getConnectionsDurationMean() > 0);
        assertTrue(_connector.getConnectionsDurationMax() > 0);
        assertTrue(_connector.getConnectionsDurationMean() <= _connector.getConnectionsDurationMax());
        assertEquals(2, _connector.getRequests());
        assertEquals(2.0, _connector.getConnectionsRequestsMean(), 0.01);
        assertEquals(2, _connector.getConnectionsRequestsMax());
        assertTrue(_connector.getConnectionsRequestsMean() <= _connector.getConnectionsRequestsMax());
    }
    @Test
    public void testMultipleConnections() throws Exception
    {
        // Three concurrent connections with 1, 2 and 3 requests respectively.
        doInit(3);
        sendRequest(1, 1); // request 1 connection 1
        sendRequest(2, 2); // request 1 connection 2
        sendRequest(3, 3); // request 1 connection 3
        sendRequest(2, 3); // request 2 connection 2
        sendRequest(3, 3); // request 2 connection 3
        sendRequest(3, 3); // request 3 connection 3
        doClose(3);
        assertEquals(3, _connector.getConnections());
        assertEquals(0, _connector.getConnectionsOpen());
        assertEquals(3, _connector.getConnectionsOpenMax());
        assertTrue(_connector.getConnectionsOpen() <= _connector.getConnectionsOpenMax());
        assertTrue(_connector.getConnectionsDurationMean() > 0);
        assertTrue(_connector.getConnectionsDurationMax() > 0);
        assertTrue(_connector.getConnectionsDurationMean() <= _connector.getConnectionsDurationMax());
        assertEquals(6, _connector.getRequests());
        assertEquals(2.0, _connector.getConnectionsRequestsMean(), 0.01);
        assertEquals(3, _connector.getConnectionsRequestsMax());
        assertTrue(_connector.getConnectionsRequestsMean() <= _connector.getConnectionsRequestsMax());
    }
    /**
     * Allocates client-side arrays for {@code count} connections and arms the
     * close latch with the same count.
     */
    protected void doInit(int count)
    {
        _socket = new Socket[count];
        _out = new PrintWriter[count];
        _in = new BufferedReader[count];
        _closed = new CountDownLatch(count);
    }
    /**
     * Closes the first {@code count} sockets (closing the socket also closes its
     * streams) and waits until the connector has recorded every close.
     */
    private void doClose(int count) throws Exception
    {
        for (int idx=0; idx < count; idx++)
        {
            if (_socket[idx] != null)
                _socket[idx].close();
        }
        _closed.await();
    }
    /**
     * Sends one HTTP request on connection {@code id} (1-based), lazily opening
     * the socket/streams on first use, and asserts that {@code count} connections
     * are open while the request is being handled.
     * @param id 1-based connection index into the arrays created by doInit.
     * @param count expected number of currently open connections.
     */
    private void sendRequest(int id, int count) throws Exception
    {
        int idx = id - 1;
        if (idx < 0)
            throw new IllegalArgumentException("Connection ID <= 0");
        _socket[idx] = _socket[idx] == null ? new Socket("localhost", _connector.getLocalPort()) : _socket[idx];
        _out[idx] = _out[idx] == null ? new PrintWriter(_socket[idx].getOutputStream(), true) : _out[idx];
        _in[idx] = _in[idx] == null ? new BufferedReader(new InputStreamReader(_socket[idx].getInputStream())) : _in[idx];
        _connect.reset();
        _out[idx].write("GET / HTTP/1.1\r\nHost: localhost\r\n\r\n");
        _out[idx].flush();
        // Rendezvous with the server handler: the request is now in flight.
        _connect.await();
        assertEquals(count, _connector.getConnectionsOpen());
        // Drain whatever response data is already buffered on this connection.
        while(_in[idx].ready())
        {
            _in[idx].readLine();
        }
    }
}
| |
/*
* Copyright 2014 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.handler.ssl;
import io.netty.handler.ssl.ApplicationProtocolConfig.Protocol;
import io.netty.handler.ssl.ApplicationProtocolConfig.SelectedListenerFailureBehavior;
import io.netty.handler.ssl.ApplicationProtocolConfig.SelectorFailureBehavior;
import io.netty.handler.ssl.JdkApplicationProtocolNegotiator.ProtocolSelector;
import io.netty.handler.ssl.JdkApplicationProtocolNegotiator.ProtocolSelectorFactory;
import io.netty.handler.ssl.util.InsecureTrustManagerFactory;
import io.netty.handler.ssl.util.SelfSignedCertificate;
import java.security.Provider;
import java.util.ArrayList;
import java.util.Collection;
import io.netty.util.internal.EmptyArrays;
import io.netty.util.internal.PlatformDependent;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import javax.net.ssl.SSLEngine;
import javax.net.ssl.SSLHandshakeException;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeNoException;
@RunWith(Parameterized.class)
public class JdkSslEngineTest extends SSLEngineTest {
    /**
     * The TLS application-protocol-negotiation implementations this test can run
     * against. Each value reports whether it is usable in the current JVM, which
     * TLS extension it drives (NPN vs ALPN) and which JCA provider to install.
     */
    public enum ProviderType {
        NPN_JETTY {
            @Override
            boolean isAvailable() {
                return JettyNpnSslEngine.isAvailable();
            }
            @Override
            Protocol protocol() {
                return Protocol.NPN;
            }
            @Override
            Provider provider() {
                return null;
            }
        },
        ALPN_JETTY {
            @Override
            boolean isAvailable() {
                return JettyAlpnSslEngine.isAvailable();
            }
            @Override
            Protocol protocol() {
                return Protocol.ALPN;
            }
            @Override
            Provider provider() {
                // Use the default provider.
                return null;
            }
        },
        ALPN_JAVA9 {
            @Override
            boolean isAvailable() {
                return PlatformDependent.javaVersion() >= 9 && Java9SslUtils.supportsAlpn();
            }
            @Override
            Protocol protocol() {
                return Protocol.ALPN;
            }
            @Override
            Provider provider() {
                // Use the default provider.
                return null;
            }
        },
        ALPN_CONSCRYPT {
            private Provider provider;
            @Override
            boolean isAvailable() {
                return Conscrypt.isAvailable();
            }
            @Override
            Protocol protocol() {
                return Protocol.ALPN;
            }
            @Override
            Provider provider() {
                try {
                    // Instantiated lazily via reflection so this enum still loads
                    // when Conscrypt is not on the classpath.
                    if (provider == null) {
                        provider = (Provider) Class.forName("org.conscrypt.OpenSSLProvider")
                            .getConstructor().newInstance();
                    }
                    return provider;
                } catch (Exception e) {
                    throw new IllegalStateException(e);
                }
            }
        };
        abstract boolean isAvailable();
        abstract Protocol protocol();
        abstract Provider provider();
        /**
         * Installs this provider on the given test instance, or throws
         * {@link SkipTestException} when the TLS extension is unavailable in the
         * current JVM so the test is skipped instead of failing on class load.
         */
        final void activate(JdkSslEngineTest instance) {
            // Typical code will not have to check this, but a run on an
            // incompatible JVM would otherwise fail during initialization;
            // skip the test instead when the extension is not usable here.
            if (!isAvailable()) {
                throw tlsExtensionNotFound(protocol());
            }
            instance.provider = provider();
        }
    }
    private static final String PREFERRED_APPLICATION_LEVEL_PROTOCOL = "my-protocol-http2";
    private static final String FALLBACK_APPLICATION_LEVEL_PROTOCOL = "my-protocol-http1_1";
    private static final String APPLICATION_LEVEL_PROTOCOL_NOT_COMPATIBLE = "my-protocol-FOO";
    /**
     * Cartesian product of provider type x buffer type x protocol/cipher combo x
     * delegation flag; TLSv1.3 combos are added only on Java 11+.
     */
    @Parameterized.Parameters(name = "{index}: providerType = {0}, bufferType = {1}, combo = {2}, delegate = {3}")
    public static Collection<Object[]> data() {
        List<Object[]> params = new ArrayList<Object[]>();
        for (ProviderType providerType : ProviderType.values()) {
            for (BufferType bufferType : BufferType.values()) {
                params.add(new Object[]{ providerType, bufferType, ProtocolCipherCombo.tlsv12(), true });
                params.add(new Object[]{ providerType, bufferType, ProtocolCipherCombo.tlsv12(), false });
                if (PlatformDependent.javaVersion() >= 11) {
                    params.add(new Object[] { providerType, bufferType, ProtocolCipherCombo.tlsv13(), true });
                    params.add(new Object[] { providerType, bufferType, ProtocolCipherCombo.tlsv13(), false });
                }
            }
        }
        return params;
    }
    private final ProviderType providerType;
    // Set by ProviderType.activate(); consumed by the *SslContextProvider() overrides.
    private Provider provider;
    public JdkSslEngineTest(ProviderType providerType, BufferType bufferType,
                            ProtocolCipherCombo protocolCipherCombo, boolean delegate) {
        super(bufferType, protocolCipherCombo, delegate);
        this.providerType = providerType;
    }
    @Test
    public void testTlsExtension() throws Exception {
        try {
            providerType.activate(this);
            ApplicationProtocolConfig apn = failingNegotiator(providerType.protocol(),
                PREFERRED_APPLICATION_LEVEL_PROTOCOL);
            setupHandlers(apn);
            runTest();
        } catch (SkipTestException e) {
            // ALPN availability is dependent on the java version. If ALPN is not available because of
            // java version incompatibility don't fail the test, but instead just skip the test
            assumeNoException(e);
        }
    }
    @Test
    public void testTlsExtensionNoCompatibleProtocolsNoHandshakeFailure() throws Exception {
        try {
            providerType.activate(this);
            ApplicationProtocolConfig clientApn = acceptingNegotiator(providerType.protocol(),
                PREFERRED_APPLICATION_LEVEL_PROTOCOL);
            ApplicationProtocolConfig serverApn = acceptingNegotiator(providerType.protocol(),
                APPLICATION_LEVEL_PROTOCOL_NOT_COMPATIBLE);
            setupHandlers(serverApn, clientApn);
            // Both sides accept failure, so the handshake succeeds with no protocol selected.
            runTest(null);
        } catch (SkipTestException e) {
            // ALPN availability is dependent on the java version. If ALPN is not available because of
            // java version incompatibility don't fail the test, but instead just skip the test
            assumeNoException(e);
        }
    }
    @Test
    public void testTlsExtensionNoCompatibleProtocolsClientHandshakeFailure() throws Exception {
        try {
            providerType.activate(this);
            if (providerType == ProviderType.NPN_JETTY) {
                ApplicationProtocolConfig clientApn = failingNegotiator(providerType.protocol(),
                    PREFERRED_APPLICATION_LEVEL_PROTOCOL);
                ApplicationProtocolConfig serverApn = acceptingNegotiator(providerType.protocol(),
                    APPLICATION_LEVEL_PROTOCOL_NOT_COMPATIBLE);
                setupHandlers(serverApn, clientApn);
                assertTrue(clientLatch.await(2, TimeUnit.SECONDS));
                assertTrue(clientException instanceof SSLHandshakeException);
            } else {
                // ALPN: force the server to select an incompatible protocol so the
                // client's FAIL listener rejects the negotiation.
                SelfSignedCertificate ssc = new SelfSignedCertificate();
                JdkApplicationProtocolNegotiator clientApn = new JdkAlpnApplicationProtocolNegotiator(true, true,
                    PREFERRED_APPLICATION_LEVEL_PROTOCOL);
                JdkApplicationProtocolNegotiator serverApn = new JdkAlpnApplicationProtocolNegotiator(
                    new ProtocolSelectorFactory() {
                        @Override
                        public ProtocolSelector newSelector(SSLEngine engine, Set<String> supportedProtocols) {
                            return new ProtocolSelector() {
                                @Override
                                public void unsupported() {
                                }
                                @Override
                                public String select(List<String> protocols) {
                                    return APPLICATION_LEVEL_PROTOCOL_NOT_COMPATIBLE;
                                }
                            };
                        }
                    }, JdkBaseApplicationProtocolNegotiator.FAIL_SELECTION_LISTENER_FACTORY,
                    APPLICATION_LEVEL_PROTOCOL_NOT_COMPATIBLE);
                SslContext serverSslCtx = new JdkSslServerContext(providerType.provider(),
                    ssc.certificate(), ssc.privateKey(), null, null,
                    IdentityCipherSuiteFilter.INSTANCE, serverApn, 0, 0);
                SslContext clientSslCtx = new JdkSslClientContext(providerType.provider(), null,
                    InsecureTrustManagerFactory.INSTANCE, null,
                    IdentityCipherSuiteFilter.INSTANCE, clientApn, 0, 0);
                setupHandlers(new TestDelegatingSslContext(serverSslCtx), new TestDelegatingSslContext(clientSslCtx));
                assertTrue(clientLatch.await(2, TimeUnit.SECONDS));
                // When using TLSv1.3 the handshake is NOT sent in an extra round trip which means there will be
                // no exception reported in this case but just the channel will be closed.
                assertTrue(clientException instanceof SSLHandshakeException || clientException == null);
            }
        } catch (SkipTestException e) {
            // ALPN availability is dependent on the java version. If ALPN is not available because of
            // java version incompatibility don't fail the test, but instead just skip the test
            assumeNoException(e);
        }
    }
    @Test
    public void testTlsExtensionNoCompatibleProtocolsServerHandshakeFailure() throws Exception {
        try {
            providerType.activate(this);
            ApplicationProtocolConfig clientApn = acceptingNegotiator(providerType.protocol(),
                PREFERRED_APPLICATION_LEVEL_PROTOCOL);
            ApplicationProtocolConfig serverApn = failingNegotiator(providerType.protocol(),
                APPLICATION_LEVEL_PROTOCOL_NOT_COMPATIBLE);
            setupHandlers(serverApn, clientApn);
            assertTrue(serverLatch.await(2, TimeUnit.SECONDS));
            assertTrue(serverException instanceof SSLHandshakeException);
        } catch (SkipTestException e) {
            // ALPN availability is dependent on the java version. If ALPN is not available because of
            // java version incompatibility don't fail the test, but instead just skip the test
            assumeNoException(e);
        }
    }
    @Test
    public void testAlpnCompatibleProtocolsDifferentClientOrder() throws Exception {
        try {
            providerType.activate(this);
            if (providerType == ProviderType.NPN_JETTY) {
                // This test only applies to ALPN.
                throw tlsExtensionNotFound(providerType.protocol());
            }
            // Even the preferred application protocol appears second in the client's list, it will be picked
            // because it's the first one on server's list.
            ApplicationProtocolConfig clientApn = acceptingNegotiator(Protocol.ALPN,
                FALLBACK_APPLICATION_LEVEL_PROTOCOL, PREFERRED_APPLICATION_LEVEL_PROTOCOL);
            ApplicationProtocolConfig serverApn = failingNegotiator(Protocol.ALPN,
                PREFERRED_APPLICATION_LEVEL_PROTOCOL, FALLBACK_APPLICATION_LEVEL_PROTOCOL);
            setupHandlers(serverApn, clientApn);
            assertNull(serverException);
            runTest(PREFERRED_APPLICATION_LEVEL_PROTOCOL);
        } catch (SkipTestException e) {
            // ALPN availability is dependent on the java version. If ALPN is not available because of
            // java version incompatibility don't fail the test, but instead just skip the test
            assumeNoException(e);
        }
    }
    @Test
    public void testEnablingAnAlreadyDisabledSslProtocol() throws Exception {
        testEnablingAnAlreadyDisabledSslProtocol(new String[]{}, new String[]{ SslUtils.PROTOCOL_TLS_V1_2 });
    }
    @Ignore /* Does the JDK support a "max certificate chain length"? */
    @Override
    public void testMutualAuthValidClientCertChainTooLongFailOptionalClientAuth() throws Exception {
    }
    @Ignore /* Does the JDK support a "max certificate chain length"? */
    @Override
    public void testMutualAuthValidClientCertChainTooLongFailRequireClientAuth() throws Exception {
    }
    @Override
    protected boolean mySetupMutualAuthServerIsValidException(Throwable cause) {
        // TODO(scott): work around for a JDK issue. The exception should be SSLHandshakeException.
        return super.mySetupMutualAuthServerIsValidException(cause) || causedBySSLException(cause);
    }
    /** Runs the handshake test expecting the preferred protocol to be negotiated. */
    private void runTest() throws Exception {
        runTest(PREFERRED_APPLICATION_LEVEL_PROTOCOL);
    }
    @Override
    protected SslProvider sslClientProvider() {
        return SslProvider.JDK;
    }
    @Override
    protected SslProvider sslServerProvider() {
        return SslProvider.JDK;
    }
    @Override
    protected Provider clientSslContextProvider() {
        return provider;
    }
    @Override
    protected Provider serverSslContextProvider() {
        return provider;
    }
    /** Negotiator that fatally alerts when no protocol can be agreed on. */
    private static ApplicationProtocolConfig failingNegotiator(Protocol protocol, String... supportedProtocols) {
        return new ApplicationProtocolConfig(protocol,
            SelectorFailureBehavior.FATAL_ALERT,
            SelectedListenerFailureBehavior.FATAL_ALERT,
            supportedProtocols);
    }
    /** Negotiator that quietly accepts negotiation failure. */
    private static ApplicationProtocolConfig acceptingNegotiator(Protocol protocol, String... supportedProtocols) {
        return new ApplicationProtocolConfig(protocol,
            SelectorFailureBehavior.NO_ADVERTISE,
            SelectedListenerFailureBehavior.ACCEPT,
            supportedProtocols);
    }
    private static SkipTestException tlsExtensionNotFound(Protocol protocol) {
        // Fix: this method previously *threw* the exception even though it is
        // declared to return one; callers write "throw tlsExtensionNotFound(...)",
        // so returning it keeps the call sites honest (observable behavior is
        // identical either way).
        return new SkipTestException(protocol + " not on classpath");
    }
    /** Thrown/returned to signal that a test should be skipped, not failed. */
    private static final class SkipTestException extends RuntimeException {
        private static final long serialVersionUID = 9214869217774035223L;
        SkipTestException(String message) {
            super(message);
        }
    }
    /** Delegating context that pins the protocols/ciphers chosen by the test combo. */
    private final class TestDelegatingSslContext extends DelegatingSslContext {
        TestDelegatingSslContext(SslContext ctx) {
            super(ctx);
        }
        @Override
        protected void initEngine(SSLEngine engine) {
            engine.setEnabledProtocols(protocols());
            engine.setEnabledCipherSuites(ciphers().toArray(EmptyArrays.EMPTY_STRINGS));
        }
    }
}
| |
package mods.themike.modjam.tile;
import com.google.common.collect.ObjectArrays;
import mods.themike.modjam.ModJam;
import mods.themike.modjam.api.runes.IRune;
import mods.themike.modjam.api.runes.RuneRegistry;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.inventory.IInventory;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.tileentity.TileEntity;
public class TileEntityCarvingStone extends TileEntity implements IInventory {

    /** Slots 0-2: input ingredients; slots 3-6: carved rune results shown to the player. */
    private ItemStack[] inventory = new ItemStack[7];

    /** Result slots in the order they are filled for runes[0..3]; order is part of the UI layout. */
    private static final int[] RESULT_SLOTS = { 6, 5, 3, 4 };

    public TileEntityCarvingStone() {
    }

    @Override
    public int getSizeInventory() {
        // Tied to the backing array so the two cannot drift apart.
        return inventory.length;
    }

    @Override
    public ItemStack getStackInSlot(int i) {
        return inventory[i];
    }

    /**
     * Removes up to {@code j} items from slot {@code i}. Taking from a result slot
     * (3-6) first consumes one of each ingredient and clears the other result
     * slots via {@link #remove(int)}; if any ingredient is missing, nothing is taken.
     */
    @Override
    public ItemStack decrStackSize(int i, int j) {
        ItemStack stack;
        if (i >= 3) {
            if (inventory[0] != null && inventory[1] != null && inventory[2] != null) {
                remove(i);
            } else {
                return null;
            }
        }
        if (inventory[i] != null) {
            if (inventory[i].stackSize - j > 0) {
                stack = inventory[i].splitStack(j);
                if (inventory[i].stackSize == 0) {
                    inventory[i] = null;
                }
                update();
                return stack;
            } else {
                // Taking the whole stack.
                stack = inventory[i];
                inventory[i] = null;
                update();
                return stack;
            }
        }
        return null;
    }

    @Override
    public ItemStack getStackInSlotOnClosing(int i) {
        // NOTE(review): the slot is returned without being cleared; confirm the
        // container is expected to keep its contents when closed.
        return inventory[i];
    }

    /**
     * Recomputes the result slots from the current ingredients. When all three
     * ingredient slots are filled, every registered rune whose sacrifice matches
     * the item in slot 0 yields a rune stack (up to four results); otherwise the
     * result slots are cleared.
     */
    public void update() {
        if (inventory[0] != null && inventory[1] != null && inventory[2] != null) {
            Integer[] runes = new Integer[]{};
            // Scan the registry starting at index 1 (index 0 apparently unused -
            // TODO confirm against RuneRegistry). Matches are prepended, so
            // runes[0] ends up being the highest matching registry index.
            for (int id = 1; id < RuneRegistry.getrunes().size(); id++) {
                if (RuneRegistry.getrunes().get(id).getSacrifice() == inventory[0].getItem()) {
                    runes = ObjectArrays.concat(id, runes);
                }
            }
            // Fill the result slots (order 6, 5, 3, 4 — same as the original
            // copy-pasted branches) with one rune stack per match, at most four.
            for (int idx = 0; idx < runes.length && idx < RESULT_SLOTS.length; idx++) {
                NBTTagCompound tag = new NBTTagCompound();
                tag.setInteger("uses", RuneRegistry.getrunes().get(runes[idx]).getUses());
                ItemStack stack = new ItemStack(ModJam.runes, 1, runes[idx]);
                stack.setTagCompound(tag);
                inventory[RESULT_SLOTS[idx]] = stack;
            }
        } else {
            // Missing an ingredient: no results can exist.
            inventory[3] = null;
            inventory[4] = null;
            inventory[5] = null;
            inventory[6] = null;
        }
    }

    /**
     * Consumes one item from each ingredient slot and clears every result slot
     * except the one being taken ({@code i}). Only acts when all three
     * ingredients are present.
     */
    public void remove(int i) {
        if (inventory[0] != null && inventory[1] != null && inventory[2] != null) {
            for (int slot = 0; slot <= 2; slot++) {
                inventory[slot].splitStack(1);
                if (inventory[slot].stackSize == 0) {
                    inventory[slot] = null;
                }
            }
            for (int slot = 3; slot <= 6; slot++) {
                if (i != slot) {
                    inventory[slot] = null;
                }
            }
        }
    }

    @Override
    public void setInventorySlotContents(int i, ItemStack itemstack) {
        inventory[i] = itemstack;
        this.update();
    }

    @Override
    public String getInvName() {
        return "Carving Stone";
    }

    @Override
    public boolean isInvNameLocalized() {
        // The literal name above is used as-is (not a translation key).
        return true;
    }

    @Override
    public int getInventoryStackLimit() {
        return 64;
    }

    @Override
    public boolean isUseableByPlayer(EntityPlayer entityplayer) {
        return true;
    }

    @Override
    public void openChest() {
    }

    @Override
    public void closeChest() {
    }

    @Override
    public boolean isItemValidForSlot(int i, ItemStack itemstack) {
        return true;
    }

    /** Restores the inventory from NBT; each slot is stored under its index as key. */
    @Override
    public void readFromNBT(NBTTagCompound tag) {
        super.readFromNBT(tag);
        for (int slot = 0; slot < inventory.length; slot++) {
            if (tag.getTag(String.valueOf(slot)) != null) {
                inventory[slot] = ItemStack.loadItemStackFromNBT((NBTTagCompound) tag.getTag(String.valueOf(slot)));
            }
        }
    }

    /** Persists non-empty slots to NBT, keyed by slot index. */
    @Override
    public void writeToNBT(NBTTagCompound tag) {
        super.writeToNBT(tag);
        for (int slot = 0; slot < inventory.length; slot++) {
            NBTTagCompound com = new NBTTagCompound();
            if (inventory[slot] != null) {
                inventory[slot].writeToNBT(com);
                tag.setTag(String.valueOf(slot), com);
            }
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.nifi.processors.mongodb;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.mongodb.BasicDBObject;
import com.mongodb.client.AggregateIterable;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoCursor;
import org.apache.nifi.annotation.behavior.EventDriven;
import org.apache.nifi.annotation.behavior.InputRequirement;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.util.JsonValidator;
import org.apache.nifi.processor.util.StandardValidators;
import org.bson.Document;
import org.bson.conversions.Bson;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
@Tags({"mongo", "aggregation", "aggregate"})
@CapabilityDescription("A processor that runs an aggregation query whenever a flowfile is received.")
@InputRequirement(InputRequirement.Requirement.INPUT_ALLOWED)
@EventDriven
public class RunMongoAggregation extends AbstractMongoProcessor {

    private final static Set<Relationship> relationships;
    private final static List<PropertyDescriptor> propertyDescriptors;

    static final Relationship REL_ORIGINAL = new Relationship.Builder()
            .description("The input flowfile gets sent to this relationship when the query succeeds.")
            .name("original")
            .build();
    static final Relationship REL_FAILURE = new Relationship.Builder()
            .description("The input flowfile gets sent to this relationship when the query fails.")
            .name("failure")
            .build();
    static final Relationship REL_RESULTS = new Relationship.Builder()
            .description("The result set of the aggregation will be sent to this relationship.")
            .name("results")
            .build();

    /**
     * Parses a JSON array of aggregation pipeline stages into the list of BSON
     * documents the MongoDB driver executes as a pipeline.
     *
     * @param query JSON string containing an array of aggregation stages
     * @return the parsed pipeline stages, in their original order
     * @throws IOException if {@code query} is not valid JSON
     */
    static final List<Bson> buildAggregationQuery(String query) throws IOException {
        List<Bson> result = new ArrayList<>();
        ObjectMapper mapper = new ObjectMapper();
        // Read as List<?> instead of the raw List<Map> type; each stage is
        // round-tripped through Jackson so it is normalized JSON before the
        // BSON parser sees it.
        List<?> querySteps = mapper.readValue(query, List.class);
        for (Object queryStep : querySteps) {
            result.add(BasicDBObject.parse(mapper.writeValueAsString(queryStep)));
        }
        return result;
    }

    static final PropertyDescriptor QUERY = new PropertyDescriptor.Builder()
            .name("mongo-agg-query")
            .displayName("Query")
            .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
            .description("The aggregation query to be executed.")
            .required(true)
            .addValidator(JsonValidator.INSTANCE)
            .build();
    static final PropertyDescriptor ALLOW_DISK_USE = new PropertyDescriptor.Builder()
            .name("allow-disk-use")
            .displayName("Allow Disk Use")
            .description("Set this to true to enable writing data to temporary files to prevent exceeding the " +
                    "maximum memory use limit during aggregation pipeline staged when handling large datasets.")
            .required(true)
            .allowableValues("true", "false")
            .defaultValue("false")
            .addValidator(StandardValidators.BOOLEAN_VALIDATOR)
            .build();

    static {
        List<PropertyDescriptor> _propertyDescriptors = new ArrayList<>();
        _propertyDescriptors.addAll(descriptors);
        _propertyDescriptors.add(CHARSET);
        _propertyDescriptors.add(QUERY);
        _propertyDescriptors.add(ALLOW_DISK_USE);
        _propertyDescriptors.add(JSON_TYPE);
        _propertyDescriptors.add(QUERY_ATTRIBUTE);
        _propertyDescriptors.add(BATCH_SIZE);
        _propertyDescriptors.add(RESULTS_PER_FLOWFILE);
        _propertyDescriptors.add(DATE_FORMAT);
        _propertyDescriptors.add(SSL_CONTEXT_SERVICE);
        _propertyDescriptors.add(CLIENT_AUTH);
        propertyDescriptors = Collections.unmodifiableList(_propertyDescriptors);

        final Set<Relationship> _relationships = new HashSet<>();
        _relationships.add(REL_RESULTS);
        _relationships.add(REL_ORIGINAL);
        _relationships.add(REL_FAILURE);
        relationships = Collections.unmodifiableSet(_relationships);
    }

    @Override
    public Set<Relationship> getRelationships() {
        return relationships;
    }

    @Override
    public final List<PropertyDescriptor> getSupportedPropertyDescriptors() {
        return propertyDescriptors;
    }

    /**
     * Serializes a batch of result documents to JSON. A single document is
     * written as a JSON object, multiple documents as a JSON array.
     *
     * @param batch non-empty list of result documents
     * @return the JSON string, or {@code null} if serialization failed
     */
    private String buildBatch(List<Document> batch) {
        try {
            return objectMapper.writeValueAsString(batch.size() > 1 ? batch : batch.get(0));
        } catch (Exception e) {
            // Log the root cause instead of swallowing it silently; the caller
            // will route the flowfile to failure when the null batch is written.
            getLogger().error("Unable to serialize aggregation result batch to JSON.", e);
            return null;
        }
    }

    @Override
    public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
        FlowFile flowFile = null;
        if (context.hasIncomingConnection()) {
            flowFile = session.get();
            // With only loop connections it is valid to run without a flowfile;
            // with a real upstream connection and no flowfile there is nothing to do.
            if (flowFile == null && context.hasNonLoopConnection()) {
                return;
            }
        }

        final String query = context.getProperty(QUERY).evaluateAttributeExpressions(flowFile).getValue();
        final Boolean allowDiskUse = context.getProperty(ALLOW_DISK_USE).asBoolean();
        final String queryAttr = context.getProperty(QUERY_ATTRIBUTE).evaluateAttributeExpressions(flowFile).getValue();
        final Integer batchSize = context.getProperty(BATCH_SIZE).asInteger();
        // NOTE(review): assumes RESULTS_PER_FLOWFILE is always set (unboxed below) — confirm the descriptor has a default.
        final Integer resultsPerFlowfile = context.getProperty(RESULTS_PER_FLOWFILE).asInteger();
        final String jsonTypeSetting = context.getProperty(JSON_TYPE).getValue();
        final String dateFormat = context.getProperty(DATE_FORMAT).evaluateAttributeExpressions(flowFile).getValue();
        configureMapper(jsonTypeSetting, dateFormat);

        Map<String, String> attrs = new HashMap<>();
        if (queryAttr != null && queryAttr.trim().length() > 0) {
            attrs.put(queryAttr, query);
        }

        MongoCursor<Document> iter = null;
        try {
            MongoCollection<Document> collection = getCollection(context, flowFile);
            List<Bson> aggQuery = buildAggregationQuery(query);
            AggregateIterable<Document> it = collection.aggregate(aggQuery).allowDiskUse(allowDiskUse);
            it.batchSize(batchSize != null ? batchSize : 1);
            iter = it.iterator();

            List<Document> batch = new ArrayList<>();
            boolean doneSomething = false;
            while (iter.hasNext()) {
                batch.add(iter.next());
                if (batch.size() == resultsPerFlowfile) {
                    writeBatch(buildBatch(batch), flowFile, context, session, attrs, REL_RESULTS);
                    batch = new ArrayList<>();
                    doneSomething = true;
                }
            }

            if (! batch.isEmpty()) {
                // Something remains in batch list, write it to RESULT
                writeBatch(buildBatch(batch), flowFile, context, session, attrs, REL_RESULTS);
            } else if (! doneSomething) {
                // The batch list is empty and no batch was written (empty result!), so write empty string to RESULT
                writeBatch("", flowFile, context, session, attrs, REL_RESULTS);
            }

            if (flowFile != null) {
                session.transfer(flowFile, REL_ORIGINAL);
            }
        } catch (Exception e) {
            getLogger().error("Error running MongoDB aggregation query.", e);
            if (flowFile != null) {
                session.transfer(flowFile, REL_FAILURE);
            }
        } finally {
            if (iter != null) {
                iter.close();
            }
        }
    }
}
| |
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.server.http;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.jaxrs.smile.SmileMediaTypes;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Strings;
import com.google.common.net.HostAndPort;
import com.google.inject.Inject;
import com.sun.jersey.spi.container.ResourceFilters;
import io.druid.audit.AuditInfo;
import io.druid.audit.AuditManager;
import io.druid.common.utils.ServletResourceUtils;
import io.druid.guice.annotations.Json;
import io.druid.guice.annotations.Smile;
import io.druid.java.util.common.IAE;
import io.druid.java.util.common.RE;
import io.druid.java.util.common.logger.Logger;
import io.druid.query.lookup.LookupsState;
import io.druid.server.http.security.ConfigResourceFilter;
import io.druid.server.lookup.cache.LookupCoordinatorManager;
import io.druid.server.lookup.cache.LookupExtractorFactoryMapContainer;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.HeaderParam;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
/**
 * Contains information about lookups exposed through the coordinator.
 *
 * Endpoints allow reading, creating, updating and deleting lookup definitions
 * per tier, and inspecting their load status on the nodes serving them.
 */
@Path("/druid/coordinator/v1/lookups")
@ResourceFilters(ConfigResourceFilter.class)
public class LookupCoordinatorResource
{
  private static final Logger LOG = new Logger(LookupCoordinatorResource.class);
  // Source of truth for configured lookups and last-known state on serving nodes.
  private final LookupCoordinatorManager lookupCoordinatorManager;
  // Mapper used when the request body is Smile (binary JSON).
  private final ObjectMapper smileMapper;
  // Mapper used for plain-JSON request bodies.
  private final ObjectMapper jsonMapper;
  @Inject
  public LookupCoordinatorResource(
      final LookupCoordinatorManager lookupCoordinatorManager,
      final @Smile ObjectMapper smileMapper,
      final @Json ObjectMapper jsonMapper
  )
  {
    this.smileMapper = smileMapper;
    this.jsonMapper = jsonMapper;
    this.lookupCoordinatorManager = lookupCoordinatorManager;
  }
  /**
   * Lists known lookup tiers.
   *
   * @param discover when true, return tiers discovered from announcing nodes
   *                 instead of the configured ones
   * @return 200 with tier names, 404 if no lookups are configured, 500 on error
   */
  @GET
  @Produces({MediaType.APPLICATION_JSON, SmileMediaTypes.APPLICATION_JACKSON_SMILE})
  public Response getTiers(
      @DefaultValue("false") @QueryParam("discover") boolean discover
  )
  {
    try {
      if (discover) {
        return Response.ok().entity(lookupCoordinatorManager.discoverTiers()).build();
      }
      final Map<String, Map<String, LookupExtractorFactoryMapContainer>> knownLookups = lookupCoordinatorManager.getKnownLookups();
      if (knownLookups == null) {
        return Response.status(Response.Status.NOT_FOUND).build();
      } else {
        return Response.ok().entity(knownLookups.keySet()).build();
      }
    }
    catch (Exception e) {
      LOG.error(e, "Error getting list of lookups");
      return Response.serverError().entity(ServletResourceUtils.sanitizeException(e)).build();
    }
  }
  /**
   * Replaces the full tier-to-lookup configuration with the map read from the
   * request body (JSON or Smile, chosen by Content-Type).
   *
   * @return 202 with the accepted map, 400 if the body cannot be parsed, 500 on error
   */
  @POST
  @Produces({MediaType.APPLICATION_JSON, SmileMediaTypes.APPLICATION_JACKSON_SMILE})
  @Consumes({MediaType.APPLICATION_JSON, SmileMediaTypes.APPLICATION_JACKSON_SMILE})
  public Response updateAllLookups(
      InputStream in,
      @HeaderParam(AuditManager.X_DRUID_AUTHOR) @DefaultValue("") final String author,
      @HeaderParam(AuditManager.X_DRUID_COMMENT) @DefaultValue("") final String comment,
      @Context HttpServletRequest req
  )
  {
    try {
      final boolean isSmile = SmileMediaTypes.APPLICATION_JACKSON_SMILE.equals(req.getContentType());
      final ObjectMapper mapper = isSmile ? smileMapper : jsonMapper;
      final Map<String, Map<String, LookupExtractorFactoryMapContainer>> map;
      try {
        map = mapper.readValue(in, new TypeReference<Map<String, Map<String, LookupExtractorFactoryMapContainer>>>()
        {
        });
      }
      catch (IOException e) {
        return Response.status(Response.Status.BAD_REQUEST).entity(ServletResourceUtils.sanitizeException(e)).build();
      }
      if (lookupCoordinatorManager.updateLookups(map, new AuditInfo(author, comment, req.getRemoteAddr()))) {
        return Response.status(Response.Status.ACCEPTED).entity(map).build();
      } else {
        throw new RuntimeException("Unknown error updating configuration");
      }
    }
    catch (Exception e) {
      LOG.error(e, "Error creating new lookups");
      return Response.serverError().entity(ServletResourceUtils.sanitizeException(e)).build();
    }
  }
  /**
   * Deletes one lookup from a tier.
   *
   * @return 202 if deleted, 404 if the lookup is unknown, 400 for blank path
   *         parameters, 500 on error
   */
  @DELETE
  @Produces({MediaType.APPLICATION_JSON, SmileMediaTypes.APPLICATION_JACKSON_SMILE})
  @Path("/{tier}/{lookup}")
  public Response deleteLookup(
      @PathParam("tier") String tier,
      @PathParam("lookup") String lookup,
      @HeaderParam(AuditManager.X_DRUID_AUTHOR) @DefaultValue("") final String author,
      @HeaderParam(AuditManager.X_DRUID_COMMENT) @DefaultValue("") final String comment,
      @Context HttpServletRequest req
  )
  {
    try {
      if (Strings.isNullOrEmpty(tier)) {
        return Response.status(Response.Status.BAD_REQUEST)
                       .entity(ServletResourceUtils.sanitizeException(new NullPointerException("`tier` required")))
                       .build();
      }
      if (Strings.isNullOrEmpty(lookup)) {
        return Response.status(Response.Status.BAD_REQUEST)
                       .entity(ServletResourceUtils.sanitizeException(new IAE("`lookup` required")))
                       .build();
      }
      if (lookupCoordinatorManager.deleteLookup(tier, lookup, new AuditInfo(author, comment, req.getRemoteAddr()))) {
        return Response.status(Response.Status.ACCEPTED).build();
      } else {
        return Response.status(Response.Status.NOT_FOUND).build();
      }
    }
    catch (Exception e) {
      LOG.error(e, "Error deleting lookup [%s]", lookup);
      return Response.serverError().entity(ServletResourceUtils.sanitizeException(e)).build();
    }
  }
  /**
   * Creates or updates a single lookup definition in a tier. The body is a
   * lookup spec in JSON or Smile, chosen by Content-Type.
   *
   * @return 202 if accepted, 400 for blank path parameters or an unparsable
   *         body, 500 on error
   */
  @POST
  @Produces({MediaType.APPLICATION_JSON, SmileMediaTypes.APPLICATION_JACKSON_SMILE})
  @Path("/{tier}/{lookup}")
  public Response createOrUpdateLookup(
      @PathParam("tier") String tier,
      @PathParam("lookup") String lookup,
      @HeaderParam(AuditManager.X_DRUID_AUTHOR) @DefaultValue("") final String author,
      @HeaderParam(AuditManager.X_DRUID_COMMENT) @DefaultValue("") final String comment,
      InputStream in,
      @Context HttpServletRequest req
  )
  {
    try {
      if (Strings.isNullOrEmpty(tier)) {
        return Response.status(Response.Status.BAD_REQUEST)
                       .entity(ServletResourceUtils.sanitizeException(new NullPointerException("`tier` required")))
                       .build();
      }
      if (Strings.isNullOrEmpty(lookup)) {
        return Response.status(Response.Status.BAD_REQUEST)
                       .entity(ServletResourceUtils.sanitizeException(new IAE("`lookup` required")))
                       .build();
      }
      final boolean isSmile = SmileMediaTypes.APPLICATION_JACKSON_SMILE.equals(req.getContentType());
      final ObjectMapper mapper = isSmile ? smileMapper : jsonMapper;
      final LookupExtractorFactoryMapContainer lookupSpec;
      try {
        lookupSpec = mapper.readValue(in, LookupExtractorFactoryMapContainer.class);
      }
      catch (IOException e) {
        return Response.status(Response.Status.BAD_REQUEST).entity(ServletResourceUtils.sanitizeException(e)).build();
      }
      if (lookupCoordinatorManager.updateLookup(
          tier,
          lookup,
          lookupSpec,
          new AuditInfo(author, comment, req.getRemoteAddr())
      )) {
        return Response.status(Response.Status.ACCEPTED).build();
      } else {
        throw new RuntimeException("Unknown error updating configuration");
      }
    }
    catch (Exception e) {
      LOG.error(e, "Error updating tier [%s] lookup [%s]", tier, lookup);
      return Response.serverError().entity(ServletResourceUtils.sanitizeException(e)).build();
    }
  }
  /**
   * Fetches the definition of a single lookup.
   *
   * @return 200 with the lookup spec, 404 if unknown, 400 for blank path
   *         parameters, 500 on error
   */
  @GET
  @Produces({MediaType.APPLICATION_JSON, SmileMediaTypes.APPLICATION_JACKSON_SMILE})
  @Path("/{tier}/{lookup}")
  public Response getSpecificLookup(
      @PathParam("tier") String tier,
      @PathParam("lookup") String lookup
  )
  {
    try {
      if (Strings.isNullOrEmpty(tier)) {
        return Response.status(Response.Status.BAD_REQUEST)
                       .entity(ServletResourceUtils.sanitizeException(new NullPointerException("`tier` required")))
                       .build();
      }
      if (Strings.isNullOrEmpty(lookup)) {
        return Response.status(Response.Status.BAD_REQUEST)
                       .entity(ServletResourceUtils.sanitizeException(new NullPointerException("`lookup` required")))
                       .build();
      }
      final LookupExtractorFactoryMapContainer map = lookupCoordinatorManager.getLookup(tier, lookup);
      if (map == null) {
        return Response.status(Response.Status.NOT_FOUND)
                       .entity(ServletResourceUtils.sanitizeException(new RE("lookup [%s] not found", lookup)))
                       .build();
      }
      return Response.ok().entity(map).build();
    }
    catch (Exception e) {
      LOG.error(e, "Error getting lookup [%s]", lookup);
      return Response.serverError().entity(ServletResourceUtils.sanitizeException(e)).build();
    }
  }
  /**
   * Lists the names of the lookups configured in one tier.
   *
   * @return 200 with the lookup names, 404 if the tier (or any lookup config)
   *         is unknown, 400 for a blank tier, 500 on error
   */
  @GET
  @Produces({MediaType.APPLICATION_JSON, SmileMediaTypes.APPLICATION_JACKSON_SMILE})
  @Path("/{tier}")
  public Response getSpecificTier(
      @PathParam("tier") String tier
  )
  {
    try {
      if (Strings.isNullOrEmpty(tier)) {
        return Response.status(Response.Status.BAD_REQUEST)
                       .entity(ServletResourceUtils.sanitizeException(new NullPointerException("`tier` required")))
                       .build();
      }
      final Map<String, Map<String, LookupExtractorFactoryMapContainer>> map = lookupCoordinatorManager.getKnownLookups();
      if (map == null) {
        return Response.status(Response.Status.NOT_FOUND)
                       .entity(ServletResourceUtils.sanitizeException(new RE("No lookups found")))
                       .build();
      }
      final Map<String, LookupExtractorFactoryMapContainer> tierLookups = map.get(tier);
      if (tierLookups == null) {
        return Response.status(Response.Status.NOT_FOUND)
                       .entity(ServletResourceUtils.sanitizeException(new RE("Tier [%s] not found", tier)))
                       .build();
      }
      return Response.ok().entity(tierLookups.keySet()).build();
    }
    catch (Exception e) {
      LOG.error(e, "Error getting tier [%s]", tier);
      return Response.serverError().entity(ServletResourceUtils.sanitizeException(e)).build();
    }
  }
  /**
   * Reports the load status of every configured lookup, grouped by tier.
   *
   * @param detailed when true, include the list of nodes still pending load
   * @return 200 with a tier -> lookup -> status map, 404 if no lookups are
   *         configured, 500 on error
   */
  @GET
  @Produces({MediaType.APPLICATION_JSON})
  @Path("/status")
  public Response getAllLookupsStatus(
      @QueryParam("detailed") boolean detailed
  )
  {
    try {
      Map<String, Map<String, LookupExtractorFactoryMapContainer>> configuredLookups = lookupCoordinatorManager.getKnownLookups();
      if (configuredLookups == null) {
        return Response.status(Response.Status.NOT_FOUND)
                       .entity(ServletResourceUtils.jsonize("No lookups found"))
                       .build();
      }
      Map<HostAndPort, LookupsState<LookupExtractorFactoryMapContainer>> lookupsStateOnNodes = lookupCoordinatorManager.getLastKnownLookupsStateOnNodes();
      Map<String, Map<String, LookupStatus>> result = new HashMap<>();
      for (Map.Entry<String, Map<String, LookupExtractorFactoryMapContainer>> tierEntry : configuredLookups.entrySet()) {
        String tier = tierEntry.getKey();
        Map<String, LookupStatus> lookupStatusMap = new HashMap<>();
        result.put(tier, lookupStatusMap);
        Collection<HostAndPort> hosts = lookupCoordinatorManager.discoverNodesInTier(tier);
        for (Map.Entry<String, LookupExtractorFactoryMapContainer> lookupsEntry : tierEntry.getValue().entrySet()) {
          lookupStatusMap.put(
              lookupsEntry.getKey(),
              getLookupStatus(
                  lookupsEntry.getKey(),
                  lookupsEntry.getValue(),
                  hosts,
                  lookupsStateOnNodes,
                  detailed
              )
          );
        }
      }
      return Response.ok(result).build();
    }
    catch (Exception ex) {
      LOG.error(ex, "Error getting lookups status");
      return Response.serverError().entity(ServletResourceUtils.sanitizeException(ex)).build();
    }
  }
  /**
   * Reports the load status of the lookups configured in one tier.
   *
   * @param detailed when true, include the list of nodes still pending load
   * @return 200 with a lookup -> status map, 404 if the tier or any lookup
   *         config is unknown, 500 on error
   */
  @GET
  @Produces({MediaType.APPLICATION_JSON})
  @Path("/status/{tier}")
  public Response getLookupStatusForTier(
      @PathParam("tier") String tier,
      @QueryParam("detailed") boolean detailed
  )
  {
    try {
      Map<String, Map<String, LookupExtractorFactoryMapContainer>> configuredLookups = lookupCoordinatorManager.getKnownLookups();
      if (configuredLookups == null) {
        return Response.status(Response.Status.NOT_FOUND)
                       .entity(ServletResourceUtils.jsonize("No lookups found"))
                       .build();
      }
      Map<String, LookupExtractorFactoryMapContainer> tierLookups = configuredLookups.get(tier);
      if (tierLookups == null) {
        return Response.status(Response.Status.NOT_FOUND)
                       .entity(ServletResourceUtils.jsonize("No lookups found for tier [%s].", tier))
                       .build();
      }
      Map<String, LookupStatus> lookupStatusMap = new HashMap<>();
      Collection<HostAndPort> hosts = lookupCoordinatorManager.discoverNodesInTier(tier);
      Map<HostAndPort, LookupsState<LookupExtractorFactoryMapContainer>> lookupsStateOnNodes = lookupCoordinatorManager.getLastKnownLookupsStateOnNodes();
      for (Map.Entry<String, LookupExtractorFactoryMapContainer> lookupsEntry : tierLookups.entrySet()) {
        lookupStatusMap.put(
            lookupsEntry.getKey(),
            getLookupStatus(lookupsEntry.getKey(), lookupsEntry.getValue(), hosts, lookupsStateOnNodes, detailed)
        );
      }
      return Response.ok(lookupStatusMap).build();
    }
    catch (Exception ex) {
      LOG.error(ex, "Error getting lookups status for tier [%s].", tier);
      return Response.serverError().entity(ServletResourceUtils.sanitizeException(ex)).build();
    }
  }
  /**
   * Reports the load status of a single lookup in a tier.
   *
   * @param detailed when true, include the list of nodes still pending load
   * @return 200 with the status, 404 if the tier or lookup is unknown, 500 on error
   */
  @GET
  @Produces({MediaType.APPLICATION_JSON})
  @Path("/status/{tier}/{lookup}")
  public Response getSpecificLookupStatus(
      @PathParam("tier") String tier,
      @PathParam("lookup") String lookup,
      @QueryParam("detailed") boolean detailed
  )
  {
    try {
      Map<String, Map<String, LookupExtractorFactoryMapContainer>> configuredLookups = lookupCoordinatorManager.getKnownLookups();
      if (configuredLookups == null) {
        return Response.status(Response.Status.NOT_FOUND)
                       .entity(ServletResourceUtils.jsonize("No lookups found"))
                       .build();
      }
      Map<String, LookupExtractorFactoryMapContainer> tierLookups = configuredLookups.get(tier);
      if (tierLookups == null) {
        return Response.status(Response.Status.NOT_FOUND)
                       .entity(ServletResourceUtils.jsonize("No lookups found for tier [%s].", tier))
                       .build();
      }
      LookupExtractorFactoryMapContainer lookupDef = tierLookups.get(lookup);
      if (lookupDef == null) {
        return Response.status(Response.Status.NOT_FOUND)
                       .entity(ServletResourceUtils.jsonize("Lookup [%s] not found for tier [%s].", lookup, tier))
                       .build();
      }
      return Response.ok(
          getLookupStatus(
              lookup,
              lookupDef,
              lookupCoordinatorManager.discoverNodesInTier(tier),
              lookupCoordinatorManager.getLastKnownLookupsStateOnNodes(),
              detailed
          )
      ).build();
    }
    catch (Exception ex) {
      LOG.error(ex, "Error getting lookups status for tier [%s] and lookup [%s].", tier, lookup);
      return Response.serverError().entity(ServletResourceUtils.sanitizeException(ex)).build();
    }
  }
  /**
   * Computes whether a lookup definition is loaded on all nodes of its tier.
   * A node counts as pending when it has no known state, has not loaded the
   * lookup, or holds an older version than {@code lookupDef}.
   *
   * @param detailed when true, collect the pending nodes; when false, stop at
   *                 the first pending node and return a null pending list
   */
  @VisibleForTesting
  LookupStatus getLookupStatus(
      String lookupName,
      LookupExtractorFactoryMapContainer lookupDef,
      Collection<HostAndPort> nodes,
      Map<HostAndPort, LookupsState<LookupExtractorFactoryMapContainer>> lastKnownLookupsState,
      boolean detailed
  )
  {
    boolean isReady = true;
    List<HostAndPort> pendingHosts = detailed ? new ArrayList<>() : null;
    for (HostAndPort node : nodes) {
      LookupsState<LookupExtractorFactoryMapContainer> hostState = lastKnownLookupsState.get(node);
      LookupExtractorFactoryMapContainer loadedOnHost = hostState != null
                                                        ? hostState.getCurrent().get(lookupName)
                                                        : null;
      if (loadedOnHost == null || lookupDef.replaces(loadedOnHost)) {
        isReady = false;
        if (detailed) {
          pendingHosts.add(node);
        } else {
          // Not collecting details, so one pending node settles the answer.
          break;
        }
      }
    }
    return new LookupStatus(isReady, pendingHosts);
  }
  /**
   * Reports the last-known lookup state of every node, grouped by tier.
   *
   * @param discover when true, use tiers discovered from announcing nodes
   *                 instead of the configured ones
   * @return 200 with a tier -> node -> state map, 404 if no lookups are
   *         configured (and discovery is off), 500 on error
   */
  @GET
  @Produces({MediaType.APPLICATION_JSON})
  @Path("/nodeStatus")
  public Response getAllNodesStatus(
      @QueryParam("discover") boolean discover
  )
  {
    try {
      Collection<String> tiers = null;
      if (discover) {
        tiers = lookupCoordinatorManager.discoverTiers();
      } else {
        Map<String, Map<String, LookupExtractorFactoryMapContainer>> configuredLookups = lookupCoordinatorManager.getKnownLookups();
        if (configuredLookups == null) {
          return Response.status(Response.Status.NOT_FOUND)
                         .entity(ServletResourceUtils.jsonize("No lookups configured."))
                         .build();
        }
        tiers = configuredLookups.keySet();
      }
      Map<HostAndPort, LookupsState<LookupExtractorFactoryMapContainer>> lookupsStateOnHosts = lookupCoordinatorManager.getLastKnownLookupsStateOnNodes();
      Map<String, Map<HostAndPort, LookupsState<LookupExtractorFactoryMapContainer>>> result = new HashMap<>();
      for (String tier : tiers) {
        Map<HostAndPort, LookupsState<LookupExtractorFactoryMapContainer>> tierNodesStatus = new HashMap<>();
        result.put(tier, tierNodesStatus);
        Collection<HostAndPort> nodes = lookupCoordinatorManager.discoverNodesInTier(tier);
        for (HostAndPort node : nodes) {
          LookupsState<LookupExtractorFactoryMapContainer> lookupsState = lookupsStateOnHosts.get(node);
          if (lookupsState == null) {
            // No state reported yet for this node; emit an empty placeholder.
            tierNodesStatus.put(node, new LookupsState<>(null, null, null));
          } else {
            tierNodesStatus.put(node, lookupsState);
          }
        }
      }
      return Response.ok(result).build();
    }
    catch (Exception ex) {
      LOG.error(ex, "Error getting node status.");
      return Response.serverError().entity(ServletResourceUtils.sanitizeException(ex)).build();
    }
  }
  /**
   * Reports the last-known lookup state of every node in one tier.
   *
   * @return 200 with a node -> state map, 500 on error
   */
  @GET
  @Produces({MediaType.APPLICATION_JSON})
  @Path("/nodeStatus/{tier}")
  public Response getNodesStatusInTier(
      @PathParam("tier") String tier
  )
  {
    try {
      Map<HostAndPort, LookupsState<LookupExtractorFactoryMapContainer>> lookupsStateOnHosts = lookupCoordinatorManager.getLastKnownLookupsStateOnNodes();
      Map<HostAndPort, LookupsState<LookupExtractorFactoryMapContainer>> tierNodesStatus = new HashMap<>();
      Collection<HostAndPort> nodes = lookupCoordinatorManager.discoverNodesInTier(tier);
      for (HostAndPort node : nodes) {
        LookupsState<LookupExtractorFactoryMapContainer> lookupsState = lookupsStateOnHosts.get(node);
        if (lookupsState == null) {
          // No state reported yet for this node; emit an empty placeholder.
          tierNodesStatus.put(node, new LookupsState<>(null, null, null));
        } else {
          tierNodesStatus.put(node, lookupsState);
        }
      }
      return Response.ok(tierNodesStatus).build();
    }
    catch (Exception ex) {
      LOG.error(ex, "Error getting node status for tier [%s].", tier);
      return Response.serverError().entity(ServletResourceUtils.sanitizeException(ex)).build();
    }
  }
  /**
   * Reports the last-known lookup state of a single node.
   *
   * @return 200 with the node's state, 404 if no state is known for it, 500 on error
   */
  @GET
  @Produces({MediaType.APPLICATION_JSON})
  @Path("/nodeStatus/{tier}/{hostAndPort}")
  public Response getSpecificNodeStatus(
      @PathParam("tier") String tier,
      @PathParam("hostAndPort") HostAndPort hostAndPort
  )
  {
    try {
      Map<HostAndPort, LookupsState<LookupExtractorFactoryMapContainer>> lookupsStateOnHosts = lookupCoordinatorManager.getLastKnownLookupsStateOnNodes();
      LookupsState<LookupExtractorFactoryMapContainer> lookupsState = lookupsStateOnHosts.get(hostAndPort);
      if (lookupsState == null) {
        return Response.status(Response.Status.NOT_FOUND)
                       .entity(ServletResourceUtils.jsonize("Node [%s] status is unknown.", hostAndPort))
                       .build();
      } else {
        return Response.ok(lookupsState).build();
      }
    }
    catch (Exception ex) {
      LOG.error(ex, "Error getting node status for [%s].", hostAndPort);
      return Response.serverError().entity(ServletResourceUtils.sanitizeException(ex)).build();
    }
  }
  /**
   * JSON-serializable load status of one lookup: whether every node has it
   * loaded, and (in detailed mode) which nodes are still pending.
   */
  @VisibleForTesting
  static class LookupStatus
  {
    // True when every node in the tier has the current version loaded.
    @JsonProperty
    private boolean loaded;
    // Nodes still pending; null (and omitted from JSON) in non-detailed mode.
    @JsonProperty
    @JsonInclude(JsonInclude.Include.NON_NULL)
    private List<HostAndPort> pendingNodes;
    public LookupStatus(boolean loaded, List<HostAndPort> pendingHosts)
    {
      this.loaded = loaded;
      this.pendingNodes = pendingHosts;
    }
    @Override
    public boolean equals(Object o)
    {
      if (this == o) {
        return true;
      }
      if (o == null || getClass() != o.getClass()) {
        return false;
      }
      LookupStatus that = (LookupStatus) o;
      return Objects.equals(loaded, that.loaded) &&
             Objects.equals(pendingNodes, that.pendingNodes);
    }
    @Override
    public int hashCode()
    {
      return Objects.hash(loaded, pendingNodes);
    }
  }
}
| |
/*
* Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.identity.integration.common.clients.workflow.mgt;
import org.apache.axis2.AxisFault;
import org.apache.axis2.client.Options;
import org.apache.axis2.client.ServiceClient;
import org.apache.axis2.context.ConfigurationContext;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.identity.workflow.impl.stub.WorkflowImplAdminServiceStub;
import org.wso2.carbon.identity.workflow.impl.stub.WorkflowImplAdminServiceWorkflowImplException;
import org.wso2.carbon.identity.workflow.impl.stub.bean.BPSProfile;
import org.wso2.carbon.identity.workflow.mgt.stub.bean.WorkflowRequest;
import org.wso2.carbon.identity.workflow.mgt.stub.bean.WorkflowRequestAssociation;
import org.wso2.carbon.identity.workflow.mgt.stub.WorkflowAdminServiceStub;
import org.wso2.carbon.identity.workflow.mgt.stub.WorkflowAdminServiceWorkflowException;
import org.wso2.carbon.identity.workflow.mgt.stub.metadata.Association;
import org.wso2.carbon.identity.workflow.mgt.stub.metadata.Template;
import org.wso2.carbon.identity.workflow.mgt.stub.metadata.WorkflowEvent;
import org.wso2.carbon.identity.workflow.mgt.stub.metadata.WorkflowImpl;
import org.wso2.carbon.identity.workflow.mgt.stub.metadata.WorkflowWizard;
import org.wso2.carbon.identity.workflow.mgt.stub.metadata.bean.ParametersMetaData;
import java.rmi.RemoteException;
import java.util.List;
public class WorkflowAdminClient {
private WorkflowAdminServiceStub stub;
private WorkflowImplAdminServiceStub stubImpl;
private static final Log log = LogFactory.getLog(WorkflowAdminClient.class);
/**
* @param cookie
* @param backendServerURL
* @param configCtx
* @throws AxisFault
*/
public WorkflowAdminClient(String cookie, String backendServerURL,
ConfigurationContext configCtx) throws AxisFault {
String serviceURL = backendServerURL + "WorkflowAdminService";
stub = new WorkflowAdminServiceStub(configCtx, serviceURL);
serviceURL = backendServerURL + "WorkflowImplAdminService";
stubImpl = new WorkflowImplAdminServiceStub(configCtx, serviceURL);
ServiceClient client = stub._getServiceClient();
Options option = client.getOptions();
option.setManageSession(true);
option.setProperty(org.apache.axis2.transport.http.HTTPConstants.COOKIE_STRING, cookie);
ServiceClient clientImpl = stubImpl._getServiceClient();
Options optionImpl = clientImpl.getOptions();
optionImpl.setManageSession(true);
optionImpl.setProperty(org.apache.axis2.transport.http.HTTPConstants.COOKIE_STRING, cookie);
}
public WorkflowEvent[] listWorkflowEvents() throws RemoteException {
WorkflowEvent[] workflowEvents = stub.listWorkflowEvents();
if (workflowEvents == null) {
workflowEvents = new WorkflowEvent[0];
}
return workflowEvents;
}
public Template[] listTemplates() throws RemoteException, WorkflowAdminServiceWorkflowException {
Template[] templates = stub.listTemplates();
if (templates == null) {
templates = new Template[0];
}
return templates;
}
public Template getTemplate(String templateId) throws RemoteException, WorkflowAdminServiceWorkflowException {
Template templateDTO = stub.getTemplate(templateId);
if (templateDTO != null) {
if (templateDTO.getParametersMetaData() == null) {
templateDTO.setParametersMetaData(new ParametersMetaData());
}
}
return templateDTO;
}
public WorkflowImpl getWorkflowImpl(String templateId, String implId)
throws RemoteException, WorkflowAdminServiceWorkflowException {
WorkflowImpl workflowImpl = stub.getWorkflowImpl(templateId, implId);
if (workflowImpl != null) {
if (workflowImpl.getParametersMetaData() == null) {
workflowImpl.setParametersMetaData(new ParametersMetaData());
}
}
return workflowImpl;
}
/**
* Add new workflow
*
* @throws RemoteException
* @throws WorkflowAdminServiceWorkflowException
*/
public void addWorkflow(WorkflowWizard workflow) throws RemoteException, WorkflowAdminServiceWorkflowException {
stub.addWorkflow(workflow);
}
/**
* Add new BPS profile
*
* @param bpsProfileDTO
* @throws RemoteException
* @throws WorkflowAdminServiceWorkflowException
*/
public void addBPSProfile(BPSProfile bpsProfileDTO) throws RemoteException,
WorkflowAdminServiceWorkflowException, WorkflowImplAdminServiceWorkflowImplException {
stubImpl.addBPSProfile(bpsProfileDTO);
}
/**
* Retrieve BPS Profiles
*
* @return
* @throws RemoteException
* @throws WorkflowAdminServiceWorkflowException
*/
public BPSProfile[] listBPSProfiles()
throws RemoteException, WorkflowAdminServiceWorkflowException,
WorkflowImplAdminServiceWorkflowImplException {
BPSProfile[] bpsProfiles = stubImpl.listBPSProfiles();
if (bpsProfiles == null) {
bpsProfiles = new BPSProfile[0];
}
return bpsProfiles;
}
/**
* Get BPS Profile detail for given profile name
*
* @param profileName
* @return
* @throws RemoteException
* @throws WorkflowAdminServiceWorkflowException
*/
public BPSProfile getBPSProfiles(String profileName) throws RemoteException, WorkflowAdminServiceWorkflowException,
WorkflowImplAdminServiceWorkflowImplException {
BPSProfile bpsProfile = stubImpl.getBPSProfile(profileName);
return bpsProfile;
}
/**
* Update BPS Profile
*
* @param bpsProfileDTO
* @throws RemoteException
* @throws WorkflowAdminServiceWorkflowException
*/
public void updateBPSProfile(BPSProfile bpsProfileDTO) throws RemoteException,
WorkflowAdminServiceWorkflowException, WorkflowImplAdminServiceWorkflowImplException {
stubImpl.updateBPSProfile(bpsProfileDTO);
}
public void deleteBPSProfile(String profileName) throws RemoteException, WorkflowAdminServiceWorkflowException,
WorkflowImplAdminServiceWorkflowImplException {
stubImpl.removeBPSProfile(profileName);
}
/**
* Retrieve Workflows
*
* @return
* @throws RemoteException
* @throws WorkflowAdminServiceWorkflowException
*/
public WorkflowWizard[] listWorkflows() throws RemoteException, WorkflowAdminServiceWorkflowException {
WorkflowWizard[] workflows = stub.listWorkflows();
if (workflows == null) {
workflows = new WorkflowWizard[0];
}
return workflows;
}
public void deleteWorkflow(String workflowId) throws RemoteException, WorkflowAdminServiceWorkflowException {
stub.removeWorkflow(workflowId);
}
public Association[] listAssociationsForWorkflow(String workflowId) throws RemoteException,
WorkflowAdminServiceWorkflowException {
Association[] associationsForWorkflow = stub.listAssociations(workflowId);
if (associationsForWorkflow == null) {
associationsForWorkflow = new Association[0];
}
return associationsForWorkflow;
}
/**
 * Retrieves every association known to the workflow admin service.
 *
 * @return all associations; never {@code null} — an empty array when none exist
 * @throws RemoteException on transport failures
 * @throws WorkflowAdminServiceWorkflowException on workflow service errors
 */
public Association[] listAllAssociations() throws RemoteException, WorkflowAdminServiceWorkflowException {
    Association[] allAssociations = stub.listAllAssociations();
    // Null from the stub is treated as "no associations".
    return (allAssociations == null) ? new Association[0] : allAssociations;
}
/**
 * Deletes the association with the given id.
 *
 * @param associationId id of the association to remove
 * @throws RemoteException on transport failures
 * @throws WorkflowAdminServiceWorkflowException on workflow service errors
 */
public void deleteAssociation(String associationId) throws RemoteException, WorkflowAdminServiceWorkflowException {
    stub.removeAssociation(associationId);
}
/**
 * Adds a new association that ties a workflow to an event, optionally guarded
 * by a condition.
 *
 * NOTE(review): this wrapper takes {@code workflowId} as its first parameter
 * but passes {@code associationName} first to the stub — presumably matching
 * the stub's own parameter order; verify against the remote service signature.
 *
 * @param workflowId      id of the workflow to associate
 * @param associationName display name for the new association
 * @param eventId         id of the event that should trigger the workflow
 * @param condition       condition under which the association applies
 * @throws RemoteException on transport failures
 * @throws WorkflowAdminServiceWorkflowException on workflow service errors
 */
public void addAssociation(String workflowId, String associationName, String eventId, String condition)
        throws RemoteException, WorkflowAdminServiceWorkflowException {
    stub.addAssociation(associationName, workflowId, eventId, condition);
}
/**
 * Enables an association so that it may execute.
 *
 * @param associationId id of the association to enable
 * @throws RemoteException on transport failures
 * @throws WorkflowAdminServiceWorkflowException on workflow service errors
 */
public void enableAssociation(String associationId) throws RemoteException, WorkflowAdminServiceWorkflowException {
    // true == enabled; see disableAssociation for the inverse call.
    stub.changeAssociationState(associationId, true);
}
/**
 * Disables an association so it no longer triggers workflow execution.
 *
 * @param associationId id of the association to disable
 * @throws RemoteException on transport failures
 * @throws WorkflowAdminServiceWorkflowException on workflow service errors
 */
public void disableAssociation(String associationId) throws RemoteException, WorkflowAdminServiceWorkflowException {
    // false == disabled; see enableAssociation for the inverse call.
    stub.changeAssociationState(associationId, false);
}
/**
 * Retrieves the workflow event with the given id.
 *
 * @param id id of the event to fetch
 * @return the event as returned by the service (possibly null when unknown — TODO confirm)
 * @throws RemoteException on transport failures
 */
public WorkflowEvent getEvent(String id) throws RemoteException {
    return stub.getEvent(id);
}
/**
 * Deletes a pending workflow request.
 *
 * @param requestId id of the workflow request to delete
 * @throws WorkflowAdminServiceWorkflowException on workflow service errors
 * @throws RemoteException on transport failures
 */
public void deleteRequest(String requestId) throws WorkflowAdminServiceWorkflowException, RemoteException {
    stub.deleteWorkflowRequest(requestId);
}
/**
 * Retrieves the workflow instances spawned for a given request.
 *
 * @param requestId id of the workflow request
 * @return the workflows of the request; never {@code null} — an empty array
 *         when the service reports none, matching the contract of the other
 *         list operations in this client
 * @throws WorkflowAdminServiceWorkflowException on workflow service errors
 * @throws RemoteException on transport failures
 */
public WorkflowRequestAssociation[] getWorkflowsOfRequest(String requestId) throws
        WorkflowAdminServiceWorkflowException, RemoteException {
    WorkflowRequestAssociation[] workflows = stub.getWorkflowsOfRequest(requestId);
    if (workflows == null) {
        // Consistency fix: listWorkflows()/listAllAssociations() normalize a
        // null stub response to an empty array; do the same here so callers
        // can iterate without a null check.
        workflows = new WorkflowRequestAssociation[0];
    }
    return workflows;
}
}
| |
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.segment.loading;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.jsontype.NamedType;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import io.druid.java.util.common.Intervals;
import io.druid.segment.TestHelper;
import io.druid.timeline.DataSegment;
import io.druid.timeline.partition.NumberedShardSpec;
import org.apache.commons.io.FileUtils;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import java.io.File;
import java.io.IOException;
import java.util.Set;
/**
 * Tests for {@link LocalDataSegmentFinder}: verifies that segments laid out on
 * local disk are discovered via their descriptor.json files, and that the
 * descriptors are only rewritten when updateDescriptor is requested.
 */
public class LocalDataSegmentFinderTest
{
  private static final ObjectMapper mapper = TestHelper.makeJsonMapper();
  private static final String DESCRIPTOR_JSON = "descriptor.json";
  private static final String INDEX_ZIP = "index.zip";
  // Five physical segments in total: three distinct interval/version
  // combinations plus one interval split into two numbered shards.
  private static final DataSegment SEGMENT_1 = DataSegment
      .builder()
      .dataSource("wikipedia")
      .interval(Intervals.of("2013-08-31T00:00:00.000Z/2013-09-01T00:00:00.000Z"))
      .version("2015-10-21T22:07:57.074Z")
      .loadSpec(
          ImmutableMap.<String, Object>of(
              "type",
              "local",
              "path",
              "/tmp/somewhere/index.zip"
          )
      )
      .dimensions(ImmutableList.of("language", "page"))
      .metrics(ImmutableList.of("count"))
      .build();
  private static final DataSegment SEGMENT_2 = DataSegment
      .builder(SEGMENT_1)
      .interval(Intervals.of("2013-09-01T00:00:00.000Z/2013-09-02T00:00:00.000Z"))
      .build();
  private static final DataSegment SEGMENT_3 = DataSegment
      .builder(SEGMENT_1)
      .interval(Intervals.of("2013-09-02T00:00:00.000Z/2013-09-03T00:00:00.000Z"))
      .version("2015-10-22T22:07:57.074Z")
      .build();
  private static final DataSegment SEGMENT_4_0 = DataSegment
      .builder(SEGMENT_1)
      .interval(Intervals.of("2013-09-02T00:00:00.000Z/2013-09-03T00:00:00.000Z"))
      .shardSpec(new NumberedShardSpec(0, 2))
      .build();
  private static final DataSegment SEGMENT_4_1 = DataSegment
      .builder(SEGMENT_1)
      .interval(Intervals.of("2013-09-02T00:00:00.000Z/2013-09-03T00:00:00.000Z"))
      .shardSpec(new NumberedShardSpec(1, 2))
      .build();
  @Rule
  public final TemporaryFolder temporaryFolder = new TemporaryFolder();
  // Per-test fixture: root directory plus the descriptor.json / index.zip
  // pair of every segment defined above.
  private File dataSourceDir;
  private File descriptor1;
  private File descriptor2;
  private File descriptor3;
  private File descriptor4_0;
  private File descriptor4_1;
  private File indexZip1;
  private File indexZip2;
  private File indexZip3;
  private File indexZip4_0;
  private File indexZip4_1;
  @BeforeClass
  public static void setUpStatic()
  {
    // NumberedShardSpec is not registered on the default test mapper; it is
    // needed to round-trip the descriptors of SEGMENT_4_0 / SEGMENT_4_1.
    mapper.registerSubtypes(new NamedType(NumberedShardSpec.class, "numbered"));
  }
  @Before
  public void setUp() throws Exception
  {
    // Lay out a directory tree mimicking local deep storage:
    // <dataSourceDir>/<interval>/<version>/<partition>/{descriptor.json, index.zip}
    dataSourceDir = temporaryFolder.newFolder();
    descriptor1 = new File(dataSourceDir.getAbsolutePath() + "/interval1/v1/0", DESCRIPTOR_JSON);
    descriptor2 = new File(dataSourceDir.getAbsolutePath() + "/interval2/v1/0", DESCRIPTOR_JSON);
    descriptor3 = new File(dataSourceDir.getAbsolutePath() + "/interval3/v2/0", DESCRIPTOR_JSON);
    descriptor4_0 = new File(dataSourceDir.getAbsolutePath() + "/interval4/v1/0", DESCRIPTOR_JSON);
    descriptor4_1 = new File(dataSourceDir.getAbsolutePath() + "/interval4/v1/1", DESCRIPTOR_JSON);
    descriptor1.getParentFile().mkdirs();
    descriptor2.getParentFile().mkdirs();
    descriptor3.getParentFile().mkdirs();
    descriptor4_0.getParentFile().mkdirs();
    descriptor4_1.getParentFile().mkdirs();
    mapper.writeValue(descriptor1, SEGMENT_1);
    mapper.writeValue(descriptor2, SEGMENT_2);
    mapper.writeValue(descriptor3, SEGMENT_3);
    mapper.writeValue(descriptor4_0, SEGMENT_4_0);
    mapper.writeValue(descriptor4_1, SEGMENT_4_1);
    // Empty index.zip files next to each descriptor; the finder requires them
    // to exist (see testFindSegmentsFail).
    indexZip1 = new File(descriptor1.getParentFile(), INDEX_ZIP);
    indexZip2 = new File(descriptor2.getParentFile(), INDEX_ZIP);
    indexZip3 = new File(descriptor3.getParentFile(), INDEX_ZIP);
    indexZip4_0 = new File(descriptor4_0.getParentFile(), INDEX_ZIP);
    indexZip4_1 = new File(descriptor4_1.getParentFile(), INDEX_ZIP);
    indexZip1.createNewFile();
    indexZip2.createNewFile();
    indexZip3.createNewFile();
    indexZip4_0.createNewFile();
    indexZip4_1.createNewFile();
  }
  @Test
  public void testFindSegments() throws SegmentLoadingException, IOException
  {
    final LocalDataSegmentFinder localDataSegmentFinder = new LocalDataSegmentFinder(mapper);
    // First pass without descriptor updates: all five segments are found.
    final Set<DataSegment> segments = localDataSegmentFinder.findSegments(dataSourceDir.getAbsolutePath(), false);
    Assert.assertEquals(5, segments.size());
    DataSegment updatedSegment1 = null;
    DataSegment updatedSegment2 = null;
    DataSegment updatedSegment3 = null;
    DataSegment updatedSegment4_0 = null;
    DataSegment updatedSegment4_1 = null;
    for (DataSegment dataSegment : segments) {
      if (dataSegment.getIdentifier().equals(SEGMENT_1.getIdentifier())) {
        updatedSegment1 = dataSegment;
      } else if (dataSegment.getIdentifier().equals(SEGMENT_2.getIdentifier())) {
        updatedSegment2 = dataSegment;
      } else if (dataSegment.getIdentifier().equals(SEGMENT_3.getIdentifier())) {
        updatedSegment3 = dataSegment;
      } else if (dataSegment.getIdentifier().equals(SEGMENT_4_0.getIdentifier())) {
        updatedSegment4_0 = dataSegment;
      } else if (dataSegment.getIdentifier().equals(SEGMENT_4_1.getIdentifier())) {
        updatedSegment4_1 = dataSegment;
      } else {
        Assert.fail("Unexpected segment");
      }
    }
    // The returned segments carry rewritten loadSpec paths pointing at the
    // on-disk index.zip locations.
    Assert.assertEquals(descriptor1.getAbsolutePath(), getDescriptorPath(updatedSegment1));
    Assert.assertEquals(descriptor2.getAbsolutePath(), getDescriptorPath(updatedSegment2));
    Assert.assertEquals(descriptor3.getAbsolutePath(), getDescriptorPath(updatedSegment3));
    Assert.assertEquals(descriptor4_0.getAbsolutePath(), getDescriptorPath(updatedSegment4_0));
    Assert.assertEquals(descriptor4_1.getAbsolutePath(), getDescriptorPath(updatedSegment4_1));
    final String serializedSegment1 = mapper.writeValueAsString(updatedSegment1);
    final String serializedSegment2 = mapper.writeValueAsString(updatedSegment2);
    final String serializedSegment3 = mapper.writeValueAsString(updatedSegment3);
    final String serializedSegment4_0 = mapper.writeValueAsString(updatedSegment4_0);
    final String serializedSegment4_1 = mapper.writeValueAsString(updatedSegment4_1);
    // since updateDescriptor was not enabled, descriptor.json still has stale information
    Assert.assertNotEquals(serializedSegment1, FileUtils.readFileToString(descriptor1));
    Assert.assertNotEquals(serializedSegment2, FileUtils.readFileToString(descriptor2));
    Assert.assertNotEquals(serializedSegment3, FileUtils.readFileToString(descriptor3));
    Assert.assertNotEquals(serializedSegment4_0, FileUtils.readFileToString(descriptor4_0));
    Assert.assertNotEquals(serializedSegment4_1, FileUtils.readFileToString(descriptor4_1));
    // enable updateDescriptor so that descriptor.json will be updated to reflect the new loadSpec
    final Set<DataSegment> segments2 = localDataSegmentFinder.findSegments(dataSourceDir.getAbsolutePath(), true);
    Assert.assertEquals(segments, segments2);
    Assert.assertEquals(serializedSegment1, FileUtils.readFileToString(descriptor1));
    Assert.assertEquals(serializedSegment2, FileUtils.readFileToString(descriptor2));
    Assert.assertEquals(serializedSegment3, FileUtils.readFileToString(descriptor3));
    Assert.assertEquals(serializedSegment4_0, FileUtils.readFileToString(descriptor4_0));
    Assert.assertEquals(serializedSegment4_1, FileUtils.readFileToString(descriptor4_1));
  }
  /** Derives the expected descriptor.json path from a segment's (rewritten) loadSpec path. */
  private String getDescriptorPath(DataSegment segment)
  {
    final File indexzip = new File(String.valueOf(segment.getLoadSpec().get("path")));
    return indexzip.getParent() + "/" + DESCRIPTOR_JSON;
  }
  @Test(expected = SegmentLoadingException.class)
  public void testFindSegmentsFail() throws SegmentLoadingException
  {
    // remove one of index.zip while keeping its descriptor.json
    indexZip4_1.delete();
    final LocalDataSegmentFinder localDataSegmentFinder = new LocalDataSegmentFinder(mapper);
    localDataSegmentFinder.findSegments(dataSourceDir.getAbsolutePath(), false);
  }
}
| |
/*
* $Id$
*/
/*
Copyright (c) 2000-2014 Board of Trustees of Leland Stanford Jr. University,
all rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
STANFORD UNIVERSITY BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
Except as contained in this notice, the name of Stanford University shall not
be used in advertising or otherwise to promote the sale, use or other dealings
in this Software without prior written authorization from Stanford University.
*/
package org.lockss.daemon;
import java.util.regex.*;
import org.lockss.util.*;
/**
 * A PrunedCachedUrlSetSpec either includes only subtrees that match a
 * pattern, or includes only subtrees that do not match a pattern. In the
 * include case, paths that are ancestors of the subtree pattern are also
 * included (so that iterations will proceed down into the included
 * subtree).
 */
public class PrunedCachedUrlSetSpec extends RangeCachedUrlSetSpec {
  // Kept for ad hoc debugging; not currently referenced in this class.
  private static final Logger log = Logger.getLogger("PCUSS");

  // The static factories set exactly one of these and leave the other null;
  // both may be null only via the package-private constructor.
  Pattern includePat;
  Pattern excludePat;

  /**
   * Create a PrunedCachedUrlSetSpec that matches URLs that are within, or
   * are an ancestor of, the subtree(s) specified by the pattern.
   * @param urlPrefix Common prefix of URLs in the CachedUrlSetSpec.
   * @param includePat Pattern specifying the subtree(s) to be included.
   */
  public static PrunedCachedUrlSetSpec
    includeMatchingSubTrees(String urlPrefix, Pattern includePat) {
    return new PrunedCachedUrlSetSpec(urlPrefix, includePat, null);
  }

  /**
   * Create a PrunedCachedUrlSetSpec that matches URLs that are not within
   * the subtree(s) specified by the pattern.
   * @param urlPrefix Common prefix of URLs in the CachedUrlSetSpec.
   * @param excludePat Pattern specifying the subtree(s) to be excluded.
   */
  public static PrunedCachedUrlSetSpec
    excludeMatchingSubTrees(String urlPrefix, Pattern excludePat) {
    return new PrunedCachedUrlSetSpec(urlPrefix, null, excludePat);
  }

  /**
   * Create a PrunedCachedUrlSetSpec that matches URLs that are within, or
   * are an ancestor of, the subtree(s) specified by the pattern.
   * @param urlPrefix Common prefix of URLs in the CachedUrlSetSpec.
   * @param includePattern Regexp specifying the subtree(s) to be included.
   * @param flags Compilation flags for the regexp.
   */
  public static PrunedCachedUrlSetSpec
    includeMatchingSubTrees(String urlPrefix,
			    String includePattern, int flags) {
    Pattern pat = Pattern.compile(includePattern, flags);
    return PrunedCachedUrlSetSpec.includeMatchingSubTrees(urlPrefix, pat);
  }

  /**
   * Create a PrunedCachedUrlSetSpec that matches URLs that are not within
   * the subtree(s) specified by the pattern.
   * @param urlPrefix Common prefix of URLs in the CachedUrlSetSpec.
   * @param excludePattern Regexp specifying the subtree(s) to be excluded.
   * @param flags Compilation flags for the regexp.
   */
  public static PrunedCachedUrlSetSpec
    excludeMatchingSubTrees(String urlPrefix,
			    String excludePattern, int flags) {
    Pattern pat = Pattern.compile(excludePattern, flags);
    return PrunedCachedUrlSetSpec.excludeMatchingSubTrees(urlPrefix, pat);
  }

  /**
   * Create a PrunedCachedUrlSetSpec that matches URLs that are within, or
   * are an ancestor of, the subtree(s) specified by the pattern.  The
   * regexp is compiled with no flags.
   * @param urlPrefix Common prefix of URLs in the CachedUrlSetSpec.
   * @param includePattern Regexp specifying the subtree(s) to be included.
   */
  public static PrunedCachedUrlSetSpec
    includeMatchingSubTrees(String urlPrefix,
			    String includePattern) {
    return PrunedCachedUrlSetSpec.includeMatchingSubTrees(urlPrefix,
							  includePattern, 0);
  }

  /**
   * Create a PrunedCachedUrlSetSpec that matches URLs that are not within
   * the subtree(s) specified by the pattern.  The regexp is compiled with
   * no flags.
   * @param urlPrefix Common prefix of URLs in the CachedUrlSetSpec.
   * @param excludePattern Regexp specifying the subtree(s) to be excluded.
   */
  public static PrunedCachedUrlSetSpec
    excludeMatchingSubTrees(String urlPrefix,
			    String excludePattern) {
    return PrunedCachedUrlSetSpec.excludeMatchingSubTrees(urlPrefix,
							  excludePattern, 0);
  }

  /**
   * Create a PrunedCachedUrlSetSpec that matches URLs that start with the
   * prefix and either are or are not within one or more subtrees specified
   * by a pattern.
   * @param urlPrefix Common prefix of URLs in the CachedUrlSetSpec.
   * @param includePat If non-null, the set includes URLs that are "below"
   * the pattern (i.e., the pattern matches an initial substring of the
   * URL), or "above" the pattern (i.e., the URL doesn't match only because
   * it's shorter than the pattern).
   * @param excludePat If non-null, the set includes URLs that aren't
   * "below" the pattern (i.e., the pattern does not match an initial
   * substring of the URL).
   */
  PrunedCachedUrlSetSpec(String urlPrefix,
			 Pattern includePat, Pattern excludePat) {
    // No lower/upper range bounds; only the prefix check of the parent is used.
    super(urlPrefix, null, null);
    this.includePat = includePat;
    this.excludePat = excludePat;
  }

  /**
   * Return true if the URL is selected by this spec: it must pass the
   * prefix match of the parent class and then the include/exclude pruning.
   */
  public boolean matches(String url) {
    boolean res = matches0(url);
    return res;
  }

  boolean matches0(String url) {
    // Prefix (and range, here unused) check from RangeCachedUrlSetSpec.
    if (!super.matches(url)) {
      return false;
    }
    if (includePat != null) {
      Matcher mat = includePat.matcher(url);
      // lookingAt(): the url is at or below an included subtree.
      // hitEnd(): the matcher ran out of input while still matching, i.e. the
      // url is a proper prefix (ancestor) of a potential match — include it so
      // tree iterations can descend into the included subtree.
      return mat.lookingAt() || mat.hitEnd();
    }
    if (excludePat != null) {
      Matcher mat = excludePat.matcher(url);
      // Exclude anything at or below an excluded subtree.
      return !mat.lookingAt();
    }
    // Neither pattern set: plain prefix behavior.
    return true;
  }

  /** Not supported
   * @throws UnsupportedOperationException always
   */
  public boolean isDisjoint(CachedUrlSetSpec spec) {
    throw new UnsupportedOperationException("isDisjoint() not implemented for PrunedCachedUrlSetSpec");
  }

  /** Not supported
   * @throws UnsupportedOperationException always
   */
  public boolean subsumes(CachedUrlSetSpec spec) {
    throw new UnsupportedOperationException("subsumes() not implemented for PrunedCachedUrlSetSpec");
  }

  /**
   * @param obj the other spec
   * @return true if the prefix and include/exclude patterns are equal.
   */
  public boolean equals(Object obj) {
    if (obj instanceof PrunedCachedUrlSetSpec) {
      PrunedCachedUrlSetSpec spec = (PrunedCachedUrlSetSpec)obj;
      return super.equals(obj) &&
	RegexpUtil.patEquals(includePat, spec.includePat) &&
	RegexpUtil.patEquals(excludePat, spec.excludePat);
    } else {
      // not a PrunedCachedUrlSetSpec
      return false;
    }
  }

  public String toString() {
    StringBuilder sb = new StringBuilder("[PCUSS: ");
    appendPrefRange(sb);
    if (includePat != null) {
      sb.append(" incl: ");
      sb.append(includePat.pattern());
      sb.append("(");
      sb.append(includePat.flags());
      sb.append(")");
    }
    if (excludePat != null) {
      sb.append(" excl: ");
      sb.append(excludePat.pattern());
      sb.append("(");
      sb.append(excludePat.flags());
      sb.append(")");
    }
    sb.append("]");
    return sb.toString();
  }

  /**
   * @return a hash made from the prefix and include/exclude patterns.
   */
  public int hashCode() {
    int hash = super.hashCode();
    if (includePat != null) {
      hash += 7 * includePat.hashCode();
    }
    if (excludePat != null) {
      hash += 11 * excludePat.hashCode();
    }
    return hash;
  }
}
| |
package com.podio.hudson;
import hudson.Extension;
import hudson.Launcher;
import hudson.model.BuildListener;
import hudson.model.Result;
import hudson.model.AbstractBuild;
import hudson.model.AbstractProject;
import hudson.model.Run;
import hudson.model.User;
import hudson.scm.ChangeLogSet;
import hudson.scm.ChangeLogSet.Entry;
import hudson.tasks.BuildStepDescriptor;
import hudson.tasks.BuildStepMonitor;
import hudson.tasks.Notifier;
import hudson.tasks.Publisher;
import hudson.tasks.Mailer;
import hudson.tasks.Mailer.UserProperty;
import hudson.tasks.junit.CaseResult;
import hudson.tasks.test.AbstractTestResultAction;
import hudson.util.FormValidation;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.logging.Logger;
import javax.servlet.ServletException;
import net.sf.json.JSONObject;
import org.apache.commons.lang.StringUtils;
import org.joda.time.LocalDate;
import org.kohsuke.stapler.DataBoundConstructor;
import org.kohsuke.stapler.QueryParameter;
import org.kohsuke.stapler.StaplerRequest;
import com.podio.APIFactory;
import com.podio.ResourceFactory;
import com.podio.app.AppAPI;
import com.podio.app.Application;
import com.podio.common.Reference;
import com.podio.common.ReferenceType;
import com.podio.contact.ContactAPI;
import com.podio.contact.ProfileField;
import com.podio.contact.ProfileMini;
import com.podio.contact.ProfileType;
import com.podio.item.FieldValuesUpdate;
import com.podio.item.ItemAPI;
import com.podio.item.ItemCreate;
import com.podio.item.ItemsResponse;
import com.podio.oauth.OAuthClientCredentials;
import com.podio.oauth.OAuthUsernameCredentials;
import com.podio.task.Task;
import com.podio.task.TaskAPI;
import com.podio.task.TaskCreate;
import com.podio.task.TaskStatus;
import com.podio.user.UserAPI;
import com.sun.jersey.api.client.UniformInterfaceException;
/**
 * Hudson/Jenkins post-build notifier that records each build as an item in a
 * Podio app and manages follow-up tasks for the developers involved:
 * <ul>
 * <li>posts a new Podio item with result, URL, changes, test counts and
 * duration after every build,</li>
 * <li>when a build turns red, creates a Podio task for every involved
 * developer, attached to the posted item,</li>
 * <li>when a build turns green again, completes the still-active tasks that
 * were attached to the first failed build of the streak.</li>
 * </ul>
 */
public class PodioBuildNotifier extends Notifier {

    @SuppressWarnings("unused")
    private static final Logger LOGGER = Logger
            .getLogger(PodioBuildNotifier.class.getName());

    /** Id of the Podio app that build items are posted to (configured per job). */
    private final String appId;

    @DataBoundConstructor
    public PodioBuildNotifier(String appId) {
        this.appId = appId;
    }

    public String getAppId() {
        return appId;
    }

    /** Creates an API factory authenticated with the globally configured credentials. */
    private APIFactory getBaseAPI() {
        DescriptorImpl descriptor = (DescriptorImpl) getDescriptor();
        return new APIFactory(new ResourceFactory(new OAuthClientCredentials(
                descriptor.clientId, descriptor.clientSecret),
                new OAuthUsernameCredentials(descriptor.username,
                        descriptor.password)));
    }

    public BuildStepMonitor getRequiredMonitorService() {
        return BuildStepMonitor.BUILD;
    }

    /**
     * Posts the finished build to Podio and creates/completes tasks on
     * red/green transitions. Always returns true so the build is never marked
     * failed by this notifier.
     */
    @Override
    public boolean perform(AbstractBuild<?, ?> build, Launcher launcher,
            BuildListener listener) throws InterruptedException, IOException {
        APIFactory apiFactory = getBaseAPI();
        // "NOT_BUILT" -> "Not built" style formatting of the result name.
        String result = StringUtils.capitalize(build.getResult().toString()
                .toLowerCase());
        result = result.replace('_', ' ');
        int spaceId = getSpace(apiFactory);
        String url = Mailer.descriptor().getUrl() + build.getParent().getUrl()
                + build.getNumber();
        Set<ProfileMini> profiles = getProfiles(apiFactory, spaceId, build);
        Integer totalTestCases = null;
        Integer failedTestCases = null;
        AbstractTestResultAction testResult = build.getTestResultAction();
        if (testResult != null) {
            totalTestCases = testResult.getTotalCount();
            failedTestCases = testResult.getFailCount();
        }
        String changes = getChangesText(build);
        int itemId = postBuild(apiFactory, build.getNumber(), result, url,
                changes, profiles, totalTestCases, failedTestCases,
                build.getDurationString());
        AbstractBuild previousBuild = build.getPreviousBuild();
        boolean oldFailed = previousBuild != null
                && previousBuild.getResult() != Result.SUCCESS;
        TaskAPI taskAPI = apiFactory.getAPI(TaskAPI.class);
        if (oldFailed && build.getResult() == Result.SUCCESS) {
            // Red -> green: close the tasks created on the first failure of
            // the streak (tasks are attached to that build's Podio item).
            Run firstFailed = getFirstFailure(previousBuild);
            Integer firstFailedItemId = getItemId(apiFactory,
                    firstFailed.getNumber());
            if (firstFailedItemId != null) {
                List<Task> tasks = taskAPI.getTasksWithReference(new Reference(
                        ReferenceType.ITEM, firstFailedItemId));
                for (Task task : tasks) {
                    if (task.getStatus() == TaskStatus.ACTIVE) {
                        taskAPI.completeTask(task.getId());
                    }
                }
            }
        } else if (!oldFailed && build.getResult() != Result.SUCCESS) {
            // Green -> red: create a task per involved developer.
            String text = "Build " + build.getNumber() + " "
                    + build.getResult().toString().toLowerCase();
            // Bug fix: the previous implementation appended to a null String
            // ("description += ..."), producing a description that started
            // with the literal text "null". Build it with a StringBuilder and
            // keep description null when there are no test failures.
            String description = null;
            if (testResult != null && testResult.getFailCount() > 0) {
                StringBuilder failures = new StringBuilder();
                failures.append(testResult.getFailCount()).append(
                        " testcase(s) failed:\n");
                List<CaseResult> failedTests = testResult.getFailedTests();
                for (CaseResult caseResult : failedTests) {
                    failures.append(caseResult.getDisplayName()).append("\n");
                }
                description = failures.toString();
            }
            for (ProfileMini profile : profiles) {
                taskAPI.createTaskWithReference(
                        new TaskCreate(text, description, false,
                                new LocalDate(), profile.getUserId()),
                        new Reference(ReferenceType.ITEM, itemId), true);
            }
        }
        return true;
    }

    /**
     * Walks backwards through the build history and returns the first build of
     * the current failure streak (the build right after the last success, or
     * the oldest build if there never was a success).
     */
    private Run getFirstFailure(Run build) {
        Run previousBuild = build.getPreviousBuild();
        if (previousBuild != null) {
            if (previousBuild.getResult() == Result.SUCCESS) {
                return build;
            }
            return getFirstFailure(previousBuild);
        } else {
            return build;
        }
    }

    /**
     * Looks up the Podio item posted for a build; the build number is used as
     * the item's external id.
     *
     * @return the item id, or null when no unique match exists
     */
    private Integer getItemId(APIFactory apiFactory, int buildNumber) {
        ItemsResponse response = apiFactory.getAPI(ItemAPI.class)
                .getItemsByExternalId(Integer.parseInt(appId),
                        Integer.toString(buildNumber));
        if (response.getFiltered() != 1) {
            return null;
        }
        return response.getItems().get(0).getId();
    }

    /**
     * Posts the build as a new item in the Podio app and returns the created
     * item's id. Optional fields (changes, test counts) are omitted when null.
     */
    private int postBuild(APIFactory apiFactory, int buildNumber,
            String result, String url, String changes,
            Set<ProfileMini> profiles, Integer totalTestCases,
            Integer failedTestCases, String duration) {
        List<FieldValuesUpdate> fields = new ArrayList<FieldValuesUpdate>();
        fields.add(new FieldValuesUpdate("build-number", "value", "Build "
                + buildNumber));
        fields.add(new FieldValuesUpdate("result", "value", result));
        fields.add(new FieldValuesUpdate("url", "value", url));
        if (changes != null) {
            fields.add(new FieldValuesUpdate("changes", "value", changes));
        }
        List<Map<String, ?>> subValues = new ArrayList<Map<String, ?>>();
        for (ProfileMini profile : profiles) {
            subValues.add(Collections.<String, Object> singletonMap("value",
                    profile.getProfileId()));
        }
        fields.add(new FieldValuesUpdate("developers", subValues));
        if (totalTestCases != null) {
            fields.add(new FieldValuesUpdate("total-testcases", "value",
                    totalTestCases));
        }
        if (failedTestCases != null) {
            fields.add(new FieldValuesUpdate("failed-testcases", "value",
                    failedTestCases));
        }
        fields.add(new FieldValuesUpdate("duration", "value", duration));
        ItemCreate create = new ItemCreate(Integer.toString(buildNumber),
                fields, Collections.<Integer> emptyList(),
                Collections.<String> emptyList());
        int itemId = apiFactory.getAPI(ItemAPI.class).addItem(
                Integer.parseInt(appId), create, true);
        return itemId;
    }

    @Override
    public boolean needsToRunAfterFinalized() {
        // The posted item must reflect the final build result, so run after
        // the build has been finalized.
        return true;
    }

    /** Resolves the Podio space that the configured app lives in. */
    private int getSpace(APIFactory apiFactory) {
        return apiFactory.getAPI(AppAPI.class).getApp(Integer.parseInt(appId))
                .getSpaceId();
    }

    /**
     * Collects the Podio profiles of everyone involved in the build: culprits
     * plus the authors of the changelog entries. Developers without a matching
     * Podio contact (looked up by mail address) are silently skipped.
     */
    private Set<ProfileMini> getProfiles(APIFactory apiFactory, int spaceId,
            AbstractBuild<?, ?> build) {
        Set<ProfileMini> profiles = new HashSet<ProfileMini>();
        Set<User> culprits = build.getCulprits();
        if (culprits.size() > 0) {
            for (User culprit : culprits) {
                ProfileMini profile = getProfile(apiFactory, spaceId, culprit);
                if (profile != null) {
                    profiles.add(profile);
                }
            }
        }
        ChangeLogSet<? extends Entry> changeSet = build.getChangeSet();
        if (changeSet != null) {
            for (Entry entry : changeSet) {
                ProfileMini profile = getProfile(apiFactory, spaceId,
                        entry.getAuthor());
                if (profile != null) {
                    profiles.add(profile);
                }
            }
        }
        return profiles;
    }

    /**
     * Concatenates the annotated messages of all changelog entries, one per
     * line, or returns null when there are no changes.
     */
    private String getChangesText(AbstractBuild<?, ?> build) {
        ChangeLogSet<? extends Entry> changeSet = build.getChangeSet();
        if (changeSet == null || changeSet.isEmptySet()) {
            return null;
        }
        // StringBuilder instead of repeated String concatenation in a loop.
        StringBuilder out = new StringBuilder();
        for (Entry entry : changeSet) {
            if (out.length() > 0) {
                out.append("\n");
            }
            out.append(entry.getMsgAnnotated());
        }
        return out.toString();
    }

    /**
     * Resolves a Hudson user to a Podio contact by mail address. Returns null
     * when the user has no mail address configured or no matching contact
     * exists in the space.
     */
    private ProfileMini getProfile(APIFactory apiFactory, int spaceId, User user) {
        UserProperty mailProperty = user.getProperty(Mailer.UserProperty.class);
        if (mailProperty == null) {
            return null;
        }
        String mail = mailProperty.getAddress();
        if (mail == null) {
            return null;
        }
        List<ProfileMini> contacts = apiFactory.getAPI(ContactAPI.class)
                .getSpaceContacts(spaceId, ProfileField.MAIL, mail, 1, null,
                        ProfileType.MINI, null);
        if (contacts.isEmpty()) {
            return null;
        }
        return contacts.get(0);
    }

    /** Global configuration: Podio API credentials shared by all jobs. */
    @Extension
    public static final class DescriptorImpl extends
            BuildStepDescriptor<Publisher> {
        private String username;
        // NOTE(review): credentials are persisted in plain text by load()/save();
        // consider hudson.util.Secret for password/clientSecret.
        private String password;
        private String clientId;
        private String clientSecret;

        public DescriptorImpl() {
            super(PodioBuildNotifier.class);
            load();
        }

        @Override
        public String getDisplayName() {
            return "Podio Build Poster";
        }

        @Override
        public boolean configure(StaplerRequest req, JSONObject formData)
                throws FormException {
            req.bindParameters(this);
            this.username = formData.getString("username");
            this.password = formData.getString("password");
            this.clientId = formData.getString("clientId");
            this.clientSecret = formData.getString("clientSecret");
            save();
            return super.configure(req, formData);
        }

        /** Form validation: checks that the given app id is readable with the stored credentials. */
        public FormValidation doValidateAuth(
                @QueryParameter("appId") final String appId)
                throws IOException, ServletException {
            APIFactory apiFactory = new APIFactory(new ResourceFactory(
                    new OAuthClientCredentials(clientId, clientSecret),
                    new OAuthUsernameCredentials(username, password)));
            try {
                Application app = apiFactory.getAPI(AppAPI.class).getApp(
                        Integer.parseInt(appId));
                return FormValidation.ok("Connection ok, using app "
                        + app.getConfiguration().getName());
            } catch (UniformInterfaceException e) {
                if (e.getResponse().getStatus() == 404) {
                    return FormValidation.error("No app found with the id "
                            + appId);
                } else {
                    return FormValidation.error("Invalid username or password");
                }
            } catch (Exception e) {
                e.printStackTrace();
                return FormValidation.error("Invalid username or password");
            }
        }

        /** Form validation: verifies the supplied credentials by fetching the user's own profile. */
        public FormValidation doValidateAPI(
                @QueryParameter("username") final String username,
                @QueryParameter("password") final String password,
                @QueryParameter("clientId") final String clientId,
                @QueryParameter("clientSecret") final String clientSecret)
                throws IOException, ServletException {
            APIFactory baseAPI = new APIFactory(new ResourceFactory(
                    new OAuthClientCredentials(clientId, clientSecret),
                    new OAuthUsernameCredentials(username, password)));
            try {
                String name = baseAPI.getAPI(UserAPI.class).getProfile()
                        .getName();
                return FormValidation.ok("Connection validated, logged in as "
                        + name);
            } catch (Exception e) {
                e.printStackTrace();
                return FormValidation.error("Invalid hostname, port or ssl");
            }
        }

        @Override
        public Publisher newInstance(StaplerRequest req, JSONObject formData)
                throws FormException {
            return super.newInstance(req, formData);
        }

        @Override
        public boolean isApplicable(Class<? extends AbstractProject> jobType) {
            return true;
        }

        public String getUsername() {
            return username;
        }

        public void setUsername(String username) {
            this.username = username;
        }

        public String getPassword() {
            return password;
        }

        public void setPassword(String password) {
            this.password = password;
        }

        public String getClientId() {
            return clientId;
        }

        public void setClientId(String clientId) {
            this.clientId = clientId;
        }

        public String getClientSecret() {
            return clientSecret;
        }

        public void setClientSecret(String clientSecret) {
            this.clientSecret = clientSecret;
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.processor.aggregator;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.camel.CamelExchangeException;
import org.apache.camel.ContextTestSupport;
import org.apache.camel.Exchange;
import org.apache.camel.Expression;
import org.apache.camel.Predicate;
import org.apache.camel.Processor;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.impl.DefaultExchange;
import org.apache.camel.processor.BodyInAggregatingStrategy;
import org.apache.camel.processor.SendProcessor;
import org.apache.camel.processor.aggregate.AggregateProcessor;
import org.apache.camel.processor.aggregate.AggregationStrategy;
import org.apache.camel.spi.ExceptionHandler;
/**
* @version
*/
public class AggregateProcessorTest extends ContextTestSupport {
private ExecutorService executorService;
@Override
public boolean isUseRouteBuilder() {
    // Each test wires its AggregateProcessor by hand, so no route builder is needed.
    return false;
}
@Override
protected void setUp() throws Exception {
    super.setUp();
    // Each test drives its AggregateProcessor on a dedicated single thread.
    executorService = Executors.newSingleThreadExecutor();
}
/**
 * Completion by predicate with non-eager checking: the predicate is evaluated
 * against the aggregated body, so the group completes once it contains "END".
 */
public void testAggregateProcessorCompletionPredicate() throws Exception {
    MockEndpoint mock = getMockEndpoint("mock:result");
    mock.expectedBodiesReceived("A+B+END");
    mock.expectedPropertyReceived(Exchange.AGGREGATED_COMPLETED_BY, "predicate");
    Processor done = new SendProcessor(context.getEndpoint("mock:result"));
    Expression correlation = header("id");
    AggregationStrategy strategy = new BodyInAggregatingStrategy();
    Predicate complete = body().contains("END");
    AggregateProcessor processor = new AggregateProcessor(context, done, correlation, strategy, executorService);
    processor.setCompletionPredicate(complete);
    processor.setEagerCheckCompletion(false);
    processor.start();
    // Four exchanges sharing one correlation key; "D" arrives after the group
    // has completed and is therefore not part of the published aggregate.
    for (String payload : new String[] {"A", "B", "END", "D"}) {
        Exchange exchange = new DefaultExchange(context);
        exchange.getIn().setBody(payload);
        exchange.getIn().setHeader("id", 123);
        processor.process(exchange);
    }
    assertMockEndpointsSatisfied();
    processor.stop();
}
public void testAggregateProcessorCompletionPredicateEager() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedBodiesReceived("A+B+END");
mock.expectedPropertyReceived(Exchange.AGGREGATED_COMPLETED_BY, "predicate");
Processor done = new SendProcessor(context.getEndpoint("mock:result"));
Expression corr = header("id");
AggregationStrategy as = new BodyInAggregatingStrategy();
Predicate complete = body().isEqualTo("END");
AggregateProcessor ap = new AggregateProcessor(context, done, corr, as, executorService);
ap.setCompletionPredicate(complete);
ap.setEagerCheckCompletion(true);
ap.start();
Exchange e1 = new DefaultExchange(context);
e1.getIn().setBody("A");
e1.getIn().setHeader("id", 123);
Exchange e2 = new DefaultExchange(context);
e2.getIn().setBody("B");
e2.getIn().setHeader("id", 123);
Exchange e3 = new DefaultExchange(context);
e3.getIn().setBody("END");
e3.getIn().setHeader("id", 123);
Exchange e4 = new DefaultExchange(context);
e4.getIn().setBody("D");
e4.getIn().setHeader("id", 123);
ap.process(e1);
ap.process(e2);
ap.process(e3);
ap.process(e4);
assertMockEndpointsSatisfied();
ap.stop();
}
public void testAggregateProcessorCompletionAggregatedSize() throws Exception {
doTestAggregateProcessorCompletionAggregatedSize(false);
}
public void testAggregateProcessorCompletionAggregatedSizeEager() throws Exception {
doTestAggregateProcessorCompletionAggregatedSize(true);
}
private void doTestAggregateProcessorCompletionAggregatedSize(boolean eager) throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedBodiesReceived("A+B+C");
mock.expectedPropertyReceived(Exchange.AGGREGATED_COMPLETED_BY, "size");
Processor done = new SendProcessor(context.getEndpoint("mock:result"));
Expression corr = header("id");
AggregationStrategy as = new BodyInAggregatingStrategy();
AggregateProcessor ap = new AggregateProcessor(context, done, corr, as, executorService);
ap.setCompletionSize(3);
ap.setEagerCheckCompletion(eager);
ap.start();
Exchange e1 = new DefaultExchange(context);
e1.getIn().setBody("A");
e1.getIn().setHeader("id", 123);
Exchange e2 = new DefaultExchange(context);
e2.getIn().setBody("B");
e2.getIn().setHeader("id", 123);
Exchange e3 = new DefaultExchange(context);
e3.getIn().setBody("C");
e3.getIn().setHeader("id", 123);
Exchange e4 = new DefaultExchange(context);
e4.getIn().setBody("D");
e4.getIn().setHeader("id", 123);
ap.process(e1);
ap.process(e2);
ap.process(e3);
ap.process(e4);
assertMockEndpointsSatisfied();
ap.stop();
}
public void testAggregateProcessorCompletionTimeout() throws Exception {
doTestAggregateProcessorCompletionTimeout(false);
}
public void testAggregateProcessorCompletionTimeoutEager() throws Exception {
doTestAggregateProcessorCompletionTimeout(true);
}
private void doTestAggregateProcessorCompletionTimeout(boolean eager) throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedBodiesReceived("A+B+C");
mock.expectedPropertyReceived(Exchange.AGGREGATED_COMPLETED_BY, "timeout");
Processor done = new SendProcessor(context.getEndpoint("mock:result"));
Expression corr = header("id");
AggregationStrategy as = new BodyInAggregatingStrategy();
AggregateProcessor ap = new AggregateProcessor(context, done, corr, as, executorService);
ap.setCompletionTimeout(3000);
ap.setEagerCheckCompletion(eager);
ap.start();
Exchange e1 = new DefaultExchange(context);
e1.getIn().setBody("A");
e1.getIn().setHeader("id", 123);
Exchange e2 = new DefaultExchange(context);
e2.getIn().setBody("B");
e2.getIn().setHeader("id", 123);
Exchange e3 = new DefaultExchange(context);
e3.getIn().setBody("C");
e3.getIn().setHeader("id", 123);
Exchange e4 = new DefaultExchange(context);
e4.getIn().setBody("D");
e4.getIn().setHeader("id", 123);
ap.process(e1);
Thread.sleep(250);
ap.process(e2);
Thread.sleep(500);
ap.process(e3);
Thread.sleep(5000);
ap.process(e4);
assertMockEndpointsSatisfied();
ap.stop();
}
public void testAggregateCompletionInterval() throws Exception {
// camel context must be started
context.start();
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedBodiesReceived("A+B+C", "D");
mock.expectedPropertyReceived(Exchange.AGGREGATED_COMPLETED_BY, "interval");
Processor done = new SendProcessor(context.getEndpoint("mock:result"));
Expression corr = header("id");
AggregationStrategy as = new BodyInAggregatingStrategy();
AggregateProcessor ap = new AggregateProcessor(context, done, corr, as, executorService);
ap.setCompletionInterval(3000);
ap.start();
Exchange e1 = new DefaultExchange(context);
e1.getIn().setBody("A");
e1.getIn().setHeader("id", 123);
Exchange e2 = new DefaultExchange(context);
e2.getIn().setBody("B");
e2.getIn().setHeader("id", 123);
Exchange e3 = new DefaultExchange(context);
e3.getIn().setBody("C");
e3.getIn().setHeader("id", 123);
Exchange e4 = new DefaultExchange(context);
e4.getIn().setBody("D");
e4.getIn().setHeader("id", 123);
ap.process(e1);
ap.process(e2);
ap.process(e3);
Thread.sleep(5000);
ap.process(e4);
assertMockEndpointsSatisfied();
ap.stop();
}
public void testAggregateIgnoreInvalidCorrelationKey() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedBodiesReceived("A+C+END");
Processor done = new SendProcessor(context.getEndpoint("mock:result"));
Expression corr = header("id");
AggregationStrategy as = new BodyInAggregatingStrategy();
Predicate complete = body().contains("END");
AggregateProcessor ap = new AggregateProcessor(context, done, corr, as, executorService);
ap.setCompletionPredicate(complete);
ap.setIgnoreInvalidCorrelationKeys(true);
ap.start();
Exchange e1 = new DefaultExchange(context);
e1.getIn().setBody("A");
e1.getIn().setHeader("id", 123);
Exchange e2 = new DefaultExchange(context);
e2.getIn().setBody("B");
Exchange e3 = new DefaultExchange(context);
e3.getIn().setBody("C");
e3.getIn().setHeader("id", 123);
Exchange e4 = new DefaultExchange(context);
e4.getIn().setBody("END");
e4.getIn().setHeader("id", 123);
ap.process(e1);
ap.process(e2);
ap.process(e3);
ap.process(e4);
assertMockEndpointsSatisfied();
ap.stop();
}
public void testAggregateBadCorrelationKey() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedBodiesReceived("A+C+END");
Processor done = new SendProcessor(context.getEndpoint("mock:result"));
Expression corr = header("id");
AggregationStrategy as = new BodyInAggregatingStrategy();
Predicate complete = body().contains("END");
AggregateProcessor ap = new AggregateProcessor(context, done, corr, as, executorService);
ap.setCompletionPredicate(complete);
ap.start();
Exchange e1 = new DefaultExchange(context);
e1.getIn().setBody("A");
e1.getIn().setHeader("id", 123);
Exchange e2 = new DefaultExchange(context);
e2.getIn().setBody("B");
Exchange e3 = new DefaultExchange(context);
e3.getIn().setBody("C");
e3.getIn().setHeader("id", 123);
Exchange e4 = new DefaultExchange(context);
e4.getIn().setBody("END");
e4.getIn().setHeader("id", 123);
ap.process(e1);
try {
ap.process(e2);
fail("Should have thrown an exception");
} catch (CamelExchangeException e) {
assertEquals("Invalid correlation key. Exchange[Message: B]", e.getMessage());
}
ap.process(e3);
ap.process(e4);
assertMockEndpointsSatisfied();
ap.stop();
}
public void testAggregateCloseCorrelationKeyOnCompletion() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedBodiesReceived("A+B+END");
Processor done = new SendProcessor(context.getEndpoint("mock:result"));
Expression corr = header("id");
AggregationStrategy as = new BodyInAggregatingStrategy();
Predicate complete = body().contains("END");
AggregateProcessor ap = new AggregateProcessor(context, done, corr, as, executorService);
ap.setCompletionPredicate(complete);
ap.setCloseCorrelationKeyOnCompletion(1000);
ap.start();
Exchange e1 = new DefaultExchange(context);
e1.getIn().setBody("A");
e1.getIn().setHeader("id", 123);
Exchange e2 = new DefaultExchange(context);
e2.getIn().setBody("B");
e2.getIn().setHeader("id", 123);
Exchange e3 = new DefaultExchange(context);
e3.getIn().setBody("END");
e3.getIn().setHeader("id", 123);
Exchange e4 = new DefaultExchange(context);
e4.getIn().setBody("C");
e4.getIn().setHeader("id", 123);
ap.process(e1);
ap.process(e2);
ap.process(e3);
try {
ap.process(e4);
fail("Should have thrown an exception");
} catch (CamelExchangeException e) {
assertEquals("The correlation key [123] has been closed. Exchange[Message: C]", e.getMessage());
}
assertMockEndpointsSatisfied();
ap.stop();
}
public void testAggregateUseBatchSizeFromConsumer() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedBodiesReceived("A+B", "C+D+E");
mock.expectedPropertyReceived(Exchange.AGGREGATED_COMPLETED_BY, "consumer");
Processor done = new SendProcessor(context.getEndpoint("mock:result"));
Expression corr = header("id");
AggregationStrategy as = new BodyInAggregatingStrategy();
AggregateProcessor ap = new AggregateProcessor(context, done, corr, as, executorService);
ap.setCompletionSize(100);
ap.setCompletionFromBatchConsumer(true);
ap.start();
Exchange e1 = new DefaultExchange(context);
e1.getIn().setBody("A");
e1.getIn().setHeader("id", 123);
e1.setProperty(Exchange.BATCH_INDEX, 0);
e1.setProperty(Exchange.BATCH_SIZE, 2);
e1.setProperty(Exchange.BATCH_COMPLETE, false);
Exchange e2 = new DefaultExchange(context);
e2.getIn().setBody("B");
e2.getIn().setHeader("id", 123);
e2.setProperty(Exchange.BATCH_INDEX, 1);
e2.setProperty(Exchange.BATCH_SIZE, 2);
e2.setProperty(Exchange.BATCH_COMPLETE, true);
Exchange e3 = new DefaultExchange(context);
e3.getIn().setBody("C");
e3.getIn().setHeader("id", 123);
e3.setProperty(Exchange.BATCH_INDEX, 0);
e3.setProperty(Exchange.BATCH_SIZE, 3);
e3.setProperty(Exchange.BATCH_COMPLETE, false);
Exchange e4 = new DefaultExchange(context);
e4.getIn().setBody("D");
e4.getIn().setHeader("id", 123);
e4.setProperty(Exchange.BATCH_INDEX, 1);
e4.setProperty(Exchange.BATCH_SIZE, 3);
e4.setProperty(Exchange.BATCH_COMPLETE, false);
Exchange e5 = new DefaultExchange(context);
e5.getIn().setBody("E");
e5.getIn().setHeader("id", 123);
e5.setProperty(Exchange.BATCH_INDEX, 2);
e5.setProperty(Exchange.BATCH_SIZE, 3);
e5.setProperty(Exchange.BATCH_COMPLETE, true);
ap.process(e1);
ap.process(e2);
ap.process(e3);
ap.process(e4);
ap.process(e5);
assertMockEndpointsSatisfied();
ap.stop();
}
public void testAggregateLogFailedExchange() throws Exception {
doTestAggregateLogFailedExchange(null);
}
public void testAggregateHandleFailedExchange() throws Exception {
final AtomicBoolean tested = new AtomicBoolean();
ExceptionHandler myHandler = new ExceptionHandler() {
public void handleException(Throwable exception) {
}
public void handleException(String message, Throwable exception) {
}
public void handleException(String message, Exchange exchange, Throwable exception) {
assertEquals("Error processing aggregated exchange", message);
assertEquals("B+Kaboom+END", exchange.getIn().getBody());
assertEquals("Damn", exception.getMessage());
tested.set(true);
}
};
doTestAggregateLogFailedExchange(myHandler);
assertEquals(true, tested.get());
}
private void doTestAggregateLogFailedExchange(ExceptionHandler handler) throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedBodiesReceived("A+END");
Processor done = new Processor() {
public void process(Exchange exchange) throws Exception {
if (exchange.getIn().getBody(String.class).contains("Kaboom")) {
throw new IllegalArgumentException("Damn");
}
// else send it further along
SendProcessor send = new SendProcessor(context.getEndpoint("mock:result"));
send.start();
send.process(exchange);
}
};
Expression corr = header("id");
AggregationStrategy as = new BodyInAggregatingStrategy();
AggregateProcessor ap = new AggregateProcessor(context, done, corr, as, executorService);
ap.setEagerCheckCompletion(true);
ap.setCompletionPredicate(body().isEqualTo("END"));
if (handler != null) {
ap.setExceptionHandler(handler);
}
ap.start();
Exchange e1 = new DefaultExchange(context);
e1.getIn().setBody("A");
e1.getIn().setHeader("id", 123);
Exchange e2 = new DefaultExchange(context);
e2.getIn().setBody("B");
e2.getIn().setHeader("id", 456);
Exchange e3 = new DefaultExchange(context);
e3.getIn().setBody("Kaboom");
e3.getIn().setHeader("id", 456);
Exchange e4 = new DefaultExchange(context);
e4.getIn().setBody("END");
e4.getIn().setHeader("id", 456);
Exchange e5 = new DefaultExchange(context);
e5.getIn().setBody("END");
e5.getIn().setHeader("id", 123);
ap.process(e1);
ap.process(e2);
ap.process(e3);
ap.process(e4);
ap.process(e5);
assertMockEndpointsSatisfied();
ap.stop();
}
}
| |
package com.zandero.utils;
import java.io.*;
import java.net.URL;
import java.util.LinkedHashSet;
import java.util.Scanner;
import java.util.Set;
/**
 * Utility class to read resources and streams into Strings, words or bytes.
 */
public final class ResourceUtils {

    /** Buffer size (chars/bytes) used when draining streams. */
    private static final int BUFFER_SIZE = 100_000;

    /** Charset name used for all text reads. */
    private static final String UTF_8 = "UTF-8";

    private ResourceUtils() {
        // hide constructor - static utility class
    }

    /**
     * Loads class resource to String
     *
     * @param resourceFile to read
     * @param clazz to use for resource access
     * @return String representing the resource or null if resource could not be read
     */
    @Deprecated
    public static String getResourceAsString(String resourceFile, Class clazz) {
        Assert.notNullOrEmptyTrimmed(resourceFile, "Missing resource file!");
        // closing the Scanner also closes the underlying stream
        try (InputStream resource = clazz.getResourceAsStream(resourceFile);
             Scanner scanner = new Scanner(resource, UTF_8)) {
            return scanner.useDelimiter("\\A").next();
        }
        catch (Exception e) {
            // missing resource (null stream -> NPE) or empty resource
            // (NoSuchElementException) both yield null, per the contract above
            return null;
        }
    }

    public static String getResourceAsString(String resourceFile) {
        return getResourceAsString(resourceFile, ResourceUtils.class);
    }

    /**
     * Opens a resource as a stream, resolved against this class.
     *
     * FIX: made static - the class is final with a private constructor, so the
     * previous instance method could never be invoked.
     *
     * @param resource to open
     * @return stream, or null if the resource could not be found
     */
    public static InputStream getResourceAsStream(String resource) {
        return ResourceUtils.class.getResourceAsStream(resource);
    }

    /**
     * Loads resource as a set of Strings, where each word is added to the set
     *
     * @param resourceFile to read
     * @param clazz to use for resource access
     * @return set of strings (words, in insertion order) or null if resource could not be read
     */
    public static Set<String> getResourceWords(String resourceFile, Class clazz) {
        Assert.notNullOrEmptyTrimmed(resourceFile, "Missing resource file!");
        try (InputStream resource = clazz.getResourceAsStream(resourceFile);
             Scanner scanner = new Scanner(resource, UTF_8)) {
            // LinkedHashSet keeps first-seen order and drops duplicates
            Set<String> list = new LinkedHashSet<>();
            while (scanner.hasNext()) {
                String next = scanner.next();
                if (next != null && next.trim().length() > 0) {
                    list.add(next);
                }
            }
            return list;
        }
        catch (Exception e) {
            return null;
        }
    }

    /**
     * Loads resource as a set of Strings, where each word is added to the set
     *
     * @param resourceFile to read
     * @return set of strings (words) or null if resource could not be read
     */
    public static Set<String> getResourceWords(String resourceFile) {
        return getResourceWords(resourceFile, ResourceUtils.class);
    }

    /**
     * Get resource last modified date
     *
     * @param resourceFile to read
     * @param clazz to use for resource access
     * @return last modified date or null if resource could not be read
     */
    @Deprecated
    public static Long getLastModifiedTime(String resourceFile, Class clazz) {
        Assert.notNullOrEmptyTrimmed(resourceFile, "Missing resource file!");
        try {
            URL url = clazz.getResource(resourceFile);
            if (url == null) {
                // FIX: missing resource previously caused an uncaught NPE
                // instead of the documented null return
                return null;
            }
            return url.openConnection().getLastModified(); // get last modified date of resource
        }
        catch (IOException e) {
            return null;
        }
    }

    public static Long getLastModifiedTime(String resourceFile) {
        return getLastModifiedTime(resourceFile, ResourceUtils.class);
    }

    /**
     * Load input stream into string
     *
     * @param is stream
     * @return String representation of given input, or null for null input
     */
    public static String getString(final InputStream is) {
        return getString(is, UTF_8);
    }

    /**
     * Load input stream into string
     *
     * @param is stream
     * @param encoding to use when reading input stream (falls back to UTF-8 when blank)
     * @return String representation of given input, or null for null input
     * @throws RuntimeException wrapping any IOException raised while reading
     */
    public static String getString(final InputStream is, String encoding) {
        if (is == null) {
            return null;
        }
        if (StringUtils.isNullOrEmptyTrimmed(encoding)) {
            encoding = UTF_8;
        }
        final char[] buffer = new char[BUFFER_SIZE];
        final StringBuilder out = new StringBuilder();
        try {
            // the reader close also closes the supplied stream
            try (Reader in = new InputStreamReader(is, encoding)) {
                for (; ; ) {
                    int rsz = in.read(buffer, 0, buffer.length);
                    if (rsz < 0) {
                        break;
                    }
                    out.append(buffer, 0, rsz);
                }
            }
        }
        catch (IOException ioe) {
            throw new RuntimeException(ioe);
        }
        return out.toString();
    }

    /**
     * Load input stream into byte array
     *
     * @param is stream
     * @return byte representation of given input, or null for null input.
     *         Best effort: an IOException mid-read is deliberately swallowed
     *         and whatever was read so far is returned.
     */
    public static byte[] getBytes(InputStream is) {
        if (is == null) {
            return null;
        }
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        int nRead;
        byte[] data = new byte[BUFFER_SIZE];
        try {
            while ((nRead = is.read(data, 0, data.length)) != -1) {
                buffer.write(data, 0, nRead);
            }
            buffer.flush();
        }
        catch (IOException ignored) {
            // intentionally ignored: return the bytes read so far
        }
        return buffer.toByteArray();
    }

    /**
     * Reads file into String
     *
     * @param file to be read
     * @return file content
     * @throws IOException in case file doesn't exist or is not a file
     */
    public static String readFileToString(File file) throws IOException {
        Assert.isTrue(file.exists(), "File '" + file + "' does not exist");
        Assert.isFalse(file.isDirectory(), "File '" + file + "' is a directory");
        Assert.isTrue(file.canRead(), "File '" + file + "' cannot be read");
        // getString() closes the stream via its reader
        FileInputStream stream = new FileInputStream(file);
        return getString(stream);
    }

    /**
     * Gets absolute file path of resource
     *
     * @param resource to get absolute file path for
     * @param clazz namespace holding resource
     * @return file path if found
     * @throws IllegalArgumentException if resource can not be found
     */
    @Deprecated
    public static String getResourceAbsolutePath(String resource, Class clazz) {
        Assert.notNullOrEmptyTrimmed(resource, "Missing resource name!");
        URL file = clazz.getResource(resource);
        Assert.notNull(file, "Resource: '" + resource + "', not found!");
        return file.getFile();
    }

    public static String getResourceAbsolutePath(String resource) {
        return getResourceAbsolutePath(resource, ResourceUtils.class);
    }
}
| |
package io.b1ackr0se.carrental.fragment;
import android.app.Fragment;
import android.content.Context;
import android.net.Uri;
import android.os.Bundle;
import android.support.v7.widget.GridLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import com.parse.FindCallback;
import com.parse.ParseException;
import com.parse.ParseObject;
import com.parse.ParseQuery;
import java.util.ArrayList;
import java.util.List;
import butterknife.Bind;
import butterknife.ButterKnife;
import io.b1ackr0se.carrental.R;
import io.b1ackr0se.carrental.activity.MainActivity;
import io.b1ackr0se.carrental.adapter.HotProductAdapter;
import io.b1ackr0se.carrental.application.CustomApplication;
import io.b1ackr0se.carrental.model.Order;
import io.b1ackr0se.carrental.model.Product;
import io.b1ackr0se.carrental.util.Utility;
/**
 * A simple {@link Fragment} subclass.
 *
 * Shows a weekly report: product/order counts, order status breakdown,
 * income and new members. Data is fetched from Parse via two parallel
 * requests (orders -> products, and users); {@link #setUpData()} runs once
 * both completion flags are set.
 */
public class ReportFragment extends Fragment {

    @Bind(R.id.product)TextView productTextView;
    @Bind(R.id.recycler_view)RecyclerView recyclerView;
    @Bind(R.id.number_of_order)TextView orderTextView;
    @Bind(R.id.order_done)TextView doneOrder;
    @Bind(R.id.order_pending)TextView pendingOrder;
    @Bind(R.id.order_denied)TextView deniedOrder;
    @Bind(R.id.income)TextView incomeTextView;
    @Bind(R.id.member)TextView memberTextView;
    @Bind(R.id.content)View content;

    private Context context;
    // orders dated within the current week
    private ArrayList<Order> orders;
    // distinct products referenced by those orders
    private ArrayList<Product> products;
    // completion flags for the two async loads; setUpData() fires when both are true
    private boolean isRetrieveProductFinished = false, isRetrieveNewMember = false;
    private int done = 0, pending = 0, denied = 0, newMember = 0, income = 0;

    public ReportFragment() {
        // Required empty public constructor
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        // Inflate the layout for this fragment
        View view = inflater.inflate(R.layout.fragment_report, container, false);
        ButterKnife.bind(this, view);
        context = getActivity();
        ((MainActivity) context).showLoading();
        loadOrder();
        loadUser();
        return view;
    }

    /**
     * Loads all orders, keeps this week's ones, tallies status counts and
     * income, then chains into {@link #loadProduct(ParseQuery)}.
     */
    private void loadOrder() {
        final ParseQuery<ParseObject> query = ParseQuery.getQuery("Order");
        query.findInBackground(new FindCallback<ParseObject>() {
            @Override
            public void done(List<ParseObject> objects, ParseException e) {
                if (e == null) {
                    if (objects.size() > 0) {
                        orders = new ArrayList<>();
                        for (int i = 0; i < objects.size(); i++) {
                            if (Utility.isDateThisWeek(objects.get(i).getLong("Date"))) {
                                ParseObject object = objects.get(i);
                                Order order = new Order();
                                order.setId(object.getObjectId());
                                order.setDays(object.getInt("Days"));
                                order.setPrice(object.getInt("Price"));
                                order.setStatus(object.getInt("Status"));
                                order.setDate(object.getLong("Date"));
                                order.setProductId(object.getInt("ProductId"));
                                order.setUserId(object.getString("UserId"));
                                orders.add(order);
                            }
                        }
                        for (int j = 0; j < orders.size(); j++) {
                            if (orders.get(j).getStatus() == CustomApplication.ORDER_STATUS_DONE) {
                                done++;
                                income += orders.get(j).getPrice();
                            } else if (orders.get(j).getStatus() == CustomApplication.ORDER_STATUS_DENIED)
                                denied++;
                            else {
                                // pending orders also count toward income -
                                // presumably expected revenue; verify with product owner
                                pending++;
                                income += orders.get(j).getPrice();
                            }
                        }
                        loadProduct(query);
                    } else {
                        // FIX: previously nothing ran when no orders existed, so
                        // isRetrieveProductFinished stayed false and the loading
                        // indicator was never dismissed
                        orders = new ArrayList<>();
                        products = new ArrayList<>();
                        isRetrieveProductFinished = true;
                        if (isRetrieveNewMember) setUpData();
                    }
                } else
                    // FIX: dismiss the loading indicator on error, consistent
                    // with loadProduct() and loadUser()
                    ((MainActivity) context).hideLoading(true);
            }
        });
    }

    /**
     * Loads the products referenced by the loaded orders, links each order to
     * its product, and marks the product half of the report as finished.
     */
    private void loadProduct(ParseQuery<ParseObject> query) {
        ParseQuery<ParseObject> productQuery = new ParseQuery<>("Product").whereMatchesKeyInQuery("productId", "ProductId", query);
        productQuery.findInBackground(new FindCallback<ParseObject>() {
            @Override
            public void done(List<ParseObject> objects, ParseException e) {
                if (e == null) {
                    if (objects.size() != 0) {
                        products = new ArrayList<>();
                        // iterate newest-first (reverse order of the result list)
                        for (int i = objects.size() - 1, j = 0; i >= 0; i--, j++) {
                            ParseObject object = objects.get(i);
                            int id = object.getInt("productId");
                            String name = object.getString("Name");
                            String description = object.getString("Description");
                            int price = object.getInt("Price");
                            int category = object.getInt("CategoryId");
                            ArrayList<String> list = new ArrayList<>();
                            String image = object.getParseFile("Image").getUrl();
                            String imageUrl = Uri.parse(image).toString();
                            list.add(imageUrl);
                            Product product = new Product();
                            product.setId(id);
                            product.setName(name);
                            // strip blank lines from the description
                            product.setDescription(description.replaceAll("(?m)^[ \t]*\r?\n", ""));
                            product.setPrice(price);
                            product.setCategory(category);
                            product.setImages(list);
                            // attach this product to every order referencing it
                            for (int x = 0; x < orders.size(); x++) {
                                Order order = orders.get(x);
                                if (order.getProductId() == id) {
                                    order.setProduct(product);
                                }
                            }
                            if (products.size() == 0) products.add(product);
                            else {
                                if (!products.contains(product))
                                    products.add(product);
                            }
                        }
                    }
                    isRetrieveProductFinished = true;
                    if (isRetrieveNewMember) setUpData();
                } else
                    ((MainActivity) context).hideLoading(true);
            }
        });
    }

    /**
     * Counts users of type TYPE_USER who joined this week.
     */
    private void loadUser() {
        ParseQuery<ParseObject> query = ParseQuery.getQuery("User");
        query.whereEqualTo("Type", CustomApplication.TYPE_USER);
        query.findInBackground(new FindCallback<ParseObject>() {
            @Override
            public void done(List<ParseObject> objects, ParseException e) {
                if (e == null) {
                    if (objects.size() > 0) {
                        for (int i = 0; i < objects.size(); i++) {
                            if (Utility.isDateThisWeek(objects.get(i).getLong("JoinDate")))
                                newMember++;
                        }
                    }
                    isRetrieveNewMember = true;
                    if (isRetrieveProductFinished) setUpData();
                } else
                    ((MainActivity) context).hideLoading(true);
            }
        });
    }

    /**
     * Populates the report views once both async loads have finished.
     */
    private void setUpData() {
        ((MainActivity) context).hideLoading(false);
        productTextView.setText(String.valueOf(products.size()));
        orderTextView.setText(String.valueOf(orders.size()));
        doneOrder.setText(String.valueOf(done));
        pendingOrder.setText(String.valueOf(pending));
        deniedOrder.setText(String.valueOf(denied));
        incomeTextView.setText(Utility.showCurrency(income) + "$");
        memberTextView.setText(String.valueOf(newMember));
        if (products.size() > 0) {
            HotProductAdapter adapter = new HotProductAdapter(context, products);
            recyclerView.setLayoutManager(new GridLayoutManager(context, 1, GridLayoutManager.HORIZONTAL, false));
            recyclerView.setAdapter(adapter);
            recyclerView.setVisibility(View.VISIBLE);
        }
        content.setVisibility(View.VISIBLE);
    }
}
| |
package jsat.parameters;
import java.io.Serializable;
import java.lang.annotation.*;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.*;
import java.util.logging.Level;
import java.util.logging.Logger;
import jsat.distributions.empirical.kernelfunc.KernelFunction;
import jsat.linear.distancemetrics.DistanceMetric;
import jsat.math.decayrates.DecayRate;
/**
* This interface provides a programmable manner in which the parameters of an
* algorithm may be altered and adjusted.
*
* @author Edward Raff
*/
public abstract class Parameter implements Serializable
{
/**
 * Adding this annotation to a field tells the
 * {@link #getParamsFromMethods(java.lang.Object)} method to search this
 * object recursively for more parameter get/set
 * pairs.<br><br>
 * Placing this annotation on a {@link Collection} will cause the search to
 * be done recursively over each item in the collection.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public static @interface ParameterHolder
{
    /**
     * When {@code true}, the enclosing object's simple-class-name prefix
     * ("ClassName_") is removed from the names of the nested parameters
     * discovered through this field. Defaults to {@code false}.
     */
    boolean skipSelfNamePrefix() default false;
}
/**
 * Some variables of a learning method may be adjustable without having to
 * re-train the whole data set. <tt>false</tt> is returned if this is such a
 * parameter, <tt>true</tt> if the learning method will need to be
 * retrained after the parameter has changed. <br><br>
 * By default, this method returns <tt>true</tt> unless overwritten, as it
 * is always safe to retrain the classifier if a parameter was changed.
 * @return <tt>true</tt> if changing this parameter requires a re-training
 * of the algorithm, or <tt>false</tt> if no re-training is needed to take
 * effect.
 */
public boolean requiresRetrain()
{
    // removed the stray semicolon that previously followed the method body
    return true;
}
/**
 * Returns the name of this parameter using only valid ASCII characters.
 * @return the ASCII name
 */
abstract public String getASCIIName();
/**
 * Returns the display name of this parameter. By default, this returns the
 * {@link #getASCIIName() ASCII name} of the parameter. If one exists, a
 * name using Unicode characters may be returned instead.
 *
 * @return the name of this parameter
 */
public String getName()
{
    return getASCIIName();
}
/**
 * Returns the {@link #getName() display name} of this parameter.
 */
@Override
public String toString()
{
    return getName();
}
/**
 * Hashes on the display name, keeping this consistent with
 * {@link #equals(java.lang.Object) }, which also compares by name.
 */
@Override
public int hashCode()
{
    return getName().hashCode();
}
/**
 * Returns a string indicating the value currently held by the Parameter.
 * The exact format is implementation defined.
 *
 * @return a string representation of the parameter's value
 */
abstract public String getValueString();
/**
 * Two parameters are considered equal when they are of the exact same class
 * and share the same {@link #getName() display name}.
 *
 * @param obj the object to compare against, may be {@code null}
 * @return {@code true} if both parameters have the same class and name
 */
@Override
public boolean equals(Object obj)
{
    if (this == obj) // fast path for the reflexive case
        return true;
    if (obj == null)
        return false;
    if (getClass() != obj.getClass())
        return false;
    final Parameter other = (Parameter) obj;
    return this.getName().equals(other.getName());
}
/**
 * Creates a map from every possible parameter name (both the ASCII name and,
 * when different, the Unicode display name) to its Parameter object. No two
 * parameters may share a name.
 *
 * @param params the list of parameters to create a map for
 * @return a map of string names to their parameters
 * @throws RuntimeException if two parameters have the same name
 */
public static Map<String, Parameter> toParameterMap(List<Parameter> params)
{
    final Map<String, Parameter> byName = new HashMap<String, Parameter>(params.size());
    for (Parameter parameter : params)
    {
        final String ascii = parameter.getASCIIName();
        if (byName.put(ascii, parameter) != null)
            throw new RuntimeException("Name collision, two parameters use the name '" + ascii + "'");
        final String display = parameter.getName();
        if (display.equals(ascii))
            continue; // display name matches the ASCII key already stored
        if (byName.put(display, parameter) != null)
            throw new RuntimeException("Name collision, two parameters use the name '" + display + "'");
    }
    return byName;
}
/**
 * Given an object, this method will use reflection to automatically find
 * getter and setter method pairs, and create Parameter object for each
 * getter setter pair.<br>
 * Getters are found by searching for no argument methods that start with
 * "get" or "is". Setters are found by searching for one argument methods
 * that start with "set".
 * A getter and setter are a pair only if everything after the prefix is the
 * same in the method's name, and the return type of the getter is the same
 * class as the argument for the setter. <br>
 * Current types supported are:
 * <ul>
 * <li>integer</li>
 * <li>doubles</li>
 * <li>booleans</li>
 * <li>{@link jsat.distributions.empirical.kernelfunc.KernelFunction Kernel Functions}</li>
 * <li>{@link jsat.linear.distancemetrics.DistanceMetric Distance Metrics}</li>
 * <li>{@link Enum Enums}</li>
 * </ul>
 *
 * @param obj the object to extract parameters from
 * @return a list of parameter objects generated from the given object
 */
public static List<Parameter> getParamsFromMethods(final Object obj)
{
    return getParamsFromMethods(obj, "");
}
/**
 * Recursive worker for {@link #getParamsFromMethods(java.lang.Object) }.
 *
 * @param obj    the object to extract parameters from
 * @param prefix prepended to every generated parameter name, used to keep
 *               names unique when recursing into {@link ParameterHolder}
 *               annotated fields
 * @return a list of parameter objects generated from the given object
 */
private static List<Parameter> getParamsFromMethods(final Object obj, String prefix)
{
    Map<String, Method> getMethods = new HashMap<String, Method>();
    Map<String, Method> setMethods = new HashMap<String, Method>();
    //Collect potential get/set method pairs, keyed by the name after the prefix
    for(Method method : obj.getClass().getMethods())
    {
        int paramCount = method.getParameterTypes().length;
        if(method.isVarArgs() || paramCount > 1)
            continue;
        String name = method.getName();
        if(name.startsWith("get") && paramCount == 0)
            getMethods.put(name.substring(3), method);
        else if(name.startsWith("is") && paramCount == 0)
            getMethods.put(name.substring(2), method);
        else if(name.startsWith("set") && paramCount == 1)
            setMethods.put(name.substring(3), method);
    }
    //Find pairings and add to list
    List<Parameter> params = new ArrayList<Parameter>(Math.min(getMethods.size(), setMethods.size()));
    for(Map.Entry<String, Method> entry : setMethods.entrySet())
    {
        final Method setMethod = entry.getValue();
        final Method getMethod = getMethods.get(entry.getKey());
        if(getMethod == null) //setter without a matching getter - not a pair
            continue;
        final Class retClass = getMethod.getReturnType();
        final Class argClass = entry.getValue().getParameterTypes()[0];
        if(!retClass.equals(argClass)) //getter and setter types must agree
            continue;
        final String name = spaceCamelCase(entry.getKey());
        //Found a match do we know how to handle it?
        Parameter param = getParam(obj, argClass, getMethod, setMethod, prefix + name);
        if(param != null)
            params.add(param);
    }
    //Find params from field objects
    //first get all fields of this object, walking up the hierarchy to include inherited fields
    List<Field> fields = new ArrayList<Field>();
    Class curClassLevel = obj.getClass();
    while(curClassLevel != null)
    {
        fields.addAll(Arrays.asList(curClassLevel.getDeclaredFields()));
        curClassLevel = curClassLevel.getSuperclass();
    }
    final String simpleObjName = obj.getClass().getSimpleName();
    //For each field, check if it has our magic annotation
    for(Field field : fields)
    {
        Annotation[] annotations = field.getAnnotations();
        for(Annotation annotation : annotations)
        {
            if(annotation.annotationType().equals(ParameterHolder.class))
            {
                ParameterHolder annotationPH = (ParameterHolder) annotation;
                //get the field value from the object passed in
                try
                {
                    //If its private/protected we are not in the same object chain
                    field.setAccessible(true);
                    Object paramHolder = field.get(obj);
                    if(paramHolder instanceof Collection)//search for each item in the collection
                    {
                        Collection toSearch = (Collection) paramHolder;
                        for(Object paramHolderSub : toSearch)
                        {
                            String subPreFix = paramHolderSub.getClass().getSimpleName() + "_";
                            if(annotationPH.skipSelfNamePrefix())
                                subPreFix = prefix.replace(simpleObjName+"_", "") + subPreFix;
                            else
                                subPreFix = prefix + subPreFix;
                            params.addAll(Parameter.getParamsFromMethods(paramHolderSub, subPreFix));
                        }
                    }
                    else if(paramHolder != null)//search the item directly
                    {
                        String subPreFix = paramHolder.getClass().getSimpleName() + "_";
                        if (annotationPH.skipSelfNamePrefix())
                            subPreFix = prefix.replace(simpleObjName + "_", "") + subPreFix;
                        else
                            subPreFix = prefix + subPreFix;
                        params.addAll(Parameter.getParamsFromMethods(paramHolder, subPreFix));
                    }
                }
                catch (IllegalArgumentException ex)
                {
                    Logger.getLogger(Parameter.class.getName()).log(Level.SEVERE, null, ex);
                }
                catch (IllegalAccessException ex)
                {
                    Logger.getLogger(Parameter.class.getName()).log(Level.SEVERE, null, ex);
                }
            }
        }
    }
    return params;
}
/**
 * Convenience overload of
 * {@link #getParam(Object, Class, Method, Method, String, String)} that uses
 * the parameter subtype's default display name (passes {@code null} for
 * {@code uniName}).
 *
 * @param targetObject the object whose getter/setter the returned parameter invokes
 * @param varClass the declared type of the underlying value; selects the Parameter subtype
 * @param getMethod reflective getter used to read the value
 * @param setMethod reflective setter used to write the value
 * @param asciiName machine-friendly name reported by getASCIIName()
 * @return a Parameter wrapping the getter/setter pair, or null if varClass is unsupported
 */
private static Parameter getParam(final Object targetObject, final Class varClass, final Method getMethod, final Method setMethod, final String asciiName)
{
return getParam(targetObject, varClass, getMethod, setMethod, asciiName, null);
}
/**
 * Builds a type-appropriate {@code Parameter} that reads and writes a value on
 * {@code targetObject} through the supplied reflective getter/setter pair.
 *
 * Supported value types: double/Double, int/Integer, boolean/Boolean,
 * KernelFunction, DistanceMetric, DecayRate, and any enum (exposed as an
 * ObjectParameter whose options are the enum constants).
 *
 * NOTE(review): every reflective failure below is silently swallowed; the
 * getters then fall through to a sentinel (Double.NaN, -1, false, or null)
 * and the setters return false. Callers cannot distinguish "value is the
 * sentinel" from "reflection failed".
 *
 * @param targetObject the object the returned parameter reads from / writes to
 * @param varClass the declared value type; selects which Parameter subtype is built
 * @param getMethod reflective getter invoked by the parameter's value accessor
 * @param setMethod reflective setter invoked by the parameter's value mutator
 * @param asciiName machine-friendly name reported by getASCIIName()
 * @param uniName optional display name; when null the subtype's default name is used
 * @return the wrapping Parameter, or null when varClass is not a supported type
 */
private static Parameter getParam(final Object targetObject, final Class varClass, final Method getMethod, final Method setMethod, final String asciiName, final String uniName)
{
Parameter param = null;
// double / Double -> DoubleParameter; NaN signals an unreadable value.
if(varClass.equals(double.class) || varClass.equals(Double.class))
{
param = new DoubleParameter()
{
@Override
public double getValue()
{
try
{
return (Double) getMethod.invoke(targetObject);
}
catch (Exception ex)
{
// swallowed; NaN returned below signals failure
}
return Double.NaN;
}
@Override
public boolean setValue(double val)
{
try
{
setMethod.invoke(targetObject, val);
return true;
}
catch (Exception ex)
{
}
return false;
}
@Override
public String getASCIIName()
{
return asciiName;
}
@Override
public String getName()
{
// Prefer the explicit display name when one was supplied.
if(uniName == null)
return super.getName();
else
return uniName;
}
};
}
// int / Integer -> IntParameter; -1 is the failure sentinel (and therefore
// indistinguishable from a genuine -1 value).
else if(varClass.equals(int.class) || varClass.equals(Integer.class))
{
param = new IntParameter()
{
@Override
public int getValue()
{
try
{
return (Integer) getMethod.invoke(targetObject);
}
catch (Exception ex)
{
}
return -1;
}
@Override
public boolean setValue(int val)
{
try
{
setMethod.invoke(targetObject, val);
return true;
}
catch (Exception ex)
{
}
return false;
}
@Override
public String getASCIIName()
{
return asciiName;
}
@Override
public String getName()
{
if(uniName == null)
return super.getName();
else
return uniName;
}
};
}
// boolean / Boolean -> BooleanParameter; false is the failure sentinel.
else if(varClass.equals(boolean.class) || varClass.equals(Boolean.class))
{
param = new BooleanParameter()
{
@Override
public boolean getValue()
{
try
{
return (Boolean) getMethod.invoke(targetObject);
}
catch (Exception ex)
{
}
return false;
}
@Override
public boolean setValue(boolean val)
{
try
{
setMethod.invoke(targetObject, val);
return true;
}
catch (Exception ex)
{
}
return false;
}
@Override
public String getASCIIName()
{
return asciiName;
}
@Override
public String getName()
{
if(uniName == null)
return super.getName();
else
return uniName;
}
};
}
// NOTE(review): unlike the branches above, the KernelFunction and
// DistanceMetric branches do not override getASCIIName()/getName(), so
// asciiName/uniName are ignored for them — confirm whether intentional.
else if(varClass.equals(KernelFunction.class))
{
param = new KernelFunctionParameter()
{
@Override
public KernelFunction getObject()
{
try
{
return (KernelFunction) getMethod.invoke(targetObject);
}
catch (Exception ex)
{
}
return null;
}
@Override
public boolean setObject(KernelFunction val)
{
try
{
setMethod.invoke(targetObject, val);
return true;
}
catch (Exception ex)
{
}
return false;
}
};
}
else if(varClass.equals(DistanceMetric.class))
{
param = new MetricParameter()
{
@Override
public DistanceMetric getMetric()
{
try
{
return (DistanceMetric) getMethod.invoke(targetObject);
}
catch (Exception ex)
{
}
return null;
}
@Override
public boolean setMetric(DistanceMetric val)
{
try
{
setMethod.invoke(targetObject, val);
return true;
}
catch (Exception ex)
{
}
return false;
}
};
}
else if(varClass.equals(DecayRate.class))
{
param = new DecayRateParameter() {
@Override
public DecayRate getObject()
{
try
{
return (DecayRate) getMethod.invoke(targetObject);
}
catch (Exception ex)
{
}
return null;
}
@Override
public boolean setObject(DecayRate obj)
{
try
{
setMethod.invoke(targetObject, obj);
return true;
}
catch (Exception ex)
{
}
return false;
}
@Override
public String getASCIIName()
{
return asciiName;
}
@Override
public String getName()
{
if(uniName == null)
return super.getName();
else
return uniName;
}
};
}
else if(varClass.isEnum())//We can create an ObjectParameter for enums
{
param = new ObjectParameter() {
@Override
public Object getObject()
{
try
{
return getMethod.invoke(targetObject);
}
catch (Exception ex)
{
}
return null;
}
@Override
public boolean setObject(Object val)
{
try
{
setMethod.invoke(targetObject, val);
return true;
}
catch (Exception ex)
{
}
return false;
}
@Override
public List parameterOptions()
{
// Expose the enum constants as the fixed set of selectable values.
return Collections.unmodifiableList(Arrays.asList(varClass.getEnumConstants()));
}
@Override
public String getASCIIName()
{
return asciiName;
}
@Override
public String getName()
{
if(uniName == null)
return super.getName();
else
return uniName;
}
};
}
// null when varClass matched none of the supported types.
return param;
}
/**
 * Inserts a space before every capital letter of a CamelCase string and
 * strips leading/trailing whitespace from the result.
 *
 * @param in the CamelCase string
 * @return the spaced Camel Case string
 */
private static String spaceCamelCase(String in)
{
StringBuilder spaced = new StringBuilder(in.length() + 5);
for (char ch : in.toCharArray())
{
// A space goes in front of each uppercase character; trim() below
// removes the one added when the string starts with a capital.
if (Character.isUpperCase(ch))
spaced.append(' ');
spaced.append(ch);
}
return spaced.toString().trim();
}
}
| |
/*
* Copyright 2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.ide.xcode.plugins;
import org.apache.commons.lang.StringUtils;
import org.gradle.api.Action;
import org.gradle.api.GradleException;
import org.gradle.api.Incubating;
import org.gradle.api.Project;
import org.gradle.api.Task;
import org.gradle.api.artifacts.ArtifactView;
import org.gradle.api.artifacts.Configuration;
import org.gradle.api.artifacts.component.ComponentIdentifier;
import org.gradle.api.artifacts.component.ProjectComponentIdentifier;
import org.gradle.api.component.SoftwareComponent;
import org.gradle.api.file.FileCollection;
import org.gradle.api.internal.project.ProjectInternal;
import org.gradle.api.model.ObjectFactory;
import org.gradle.api.specs.Spec;
import org.gradle.api.tasks.Delete;
import org.gradle.api.tasks.Sync;
import org.gradle.api.tasks.TaskContainer;
import org.gradle.api.tasks.TaskProvider;
import org.gradle.ide.xcode.XcodeExtension;
import org.gradle.ide.xcode.XcodeRootExtension;
import org.gradle.ide.xcode.internal.DefaultXcodeExtension;
import org.gradle.ide.xcode.internal.DefaultXcodeProject;
import org.gradle.ide.xcode.internal.DefaultXcodeRootExtension;
import org.gradle.ide.xcode.internal.DefaultXcodeWorkspace;
import org.gradle.ide.xcode.internal.XcodeProjectMetadata;
import org.gradle.ide.xcode.internal.XcodePropertyAdapter;
import org.gradle.ide.xcode.internal.XcodeTarget;
import org.gradle.ide.xcode.internal.xcodeproj.GidGenerator;
import org.gradle.ide.xcode.internal.xcodeproj.PBXTarget;
import org.gradle.ide.xcode.tasks.GenerateSchemeFileTask;
import org.gradle.ide.xcode.tasks.GenerateWorkspaceSettingsFileTask;
import org.gradle.ide.xcode.tasks.GenerateXcodeProjectFileTask;
import org.gradle.ide.xcode.tasks.GenerateXcodeWorkspaceFileTask;
import org.gradle.internal.Actions;
import org.gradle.language.cpp.CppBinary;
import org.gradle.language.cpp.CppExecutable;
import org.gradle.language.cpp.CppLibrary;
import org.gradle.language.cpp.CppSharedLibrary;
import org.gradle.language.cpp.CppStaticLibrary;
import org.gradle.language.cpp.ProductionCppComponent;
import org.gradle.language.cpp.internal.DefaultCppBinary;
import org.gradle.language.cpp.plugins.CppApplicationPlugin;
import org.gradle.language.cpp.plugins.CppLibraryPlugin;
import org.gradle.language.swift.ProductionSwiftComponent;
import org.gradle.language.swift.SwiftBinary;
import org.gradle.language.swift.SwiftExecutable;
import org.gradle.language.swift.SwiftSharedLibrary;
import org.gradle.language.swift.SwiftStaticLibrary;
import org.gradle.language.swift.internal.DefaultSwiftBinary;
import org.gradle.language.swift.plugins.SwiftApplicationPlugin;
import org.gradle.language.swift.plugins.SwiftLibraryPlugin;
import org.gradle.nativeplatform.test.xctest.SwiftXCTestSuite;
import org.gradle.nativeplatform.test.xctest.plugins.XCTestConventionPlugin;
import org.gradle.plugins.ide.internal.IdeArtifactRegistry;
import org.gradle.plugins.ide.internal.IdePlugin;
import org.gradle.util.CollectionUtils;
import javax.inject.Inject;
import java.io.File;
/**
 * A plugin for creating an Xcode project for a Gradle project.
*
* @since 4.2
*/
@Incubating
public class XcodePlugin extends IdePlugin {
// Generates stable identifiers for entries in the generated pbxproj file.
private final GidGenerator gidGenerator;
private final ObjectFactory objectFactory;
// Registry used to publish this project's Xcode metadata and to look up
// metadata of other projects (for workspace assembly and dependency filtering).
private final IdeArtifactRegistry artifactRegistry;
// Model for this project's .xcodeproj; initialized in onApply().
private DefaultXcodeProject xcodeProject;
/**
 * Constructor; dependencies are injected by Gradle's service infrastructure.
 */
@Inject
public XcodePlugin(GidGenerator gidGenerator, ObjectFactory objectFactory, IdeArtifactRegistry artifactRegistry) {
this.gidGenerator = gidGenerator;
this.objectFactory = objectFactory;
this.artifactRegistry = artifactRegistry;
}
@Override
protected String getLifecycleTaskName() {
return "xcode";
}
/**
 * Applies the plugin: registers the "xcode" extension (the root project
 * additionally gets a workspace), wires the project/workspace generation
 * tasks, the Xcode bridge task rule, and the clean task.
 */
@Override
protected void onApply(final Project project) {
TaskProvider<? extends Task> lifecycleTask = getLifecycleTask();
lifecycleTask.configure(withDescription("Generates XCode project files (pbxproj, xcworkspace, xcscheme)"));
if (isRoot()) {
// Only the root project owns the workspace that aggregates all projects.
DefaultXcodeRootExtension xcode = (DefaultXcodeRootExtension) project.getExtensions().create(XcodeRootExtension.class, "xcode", DefaultXcodeRootExtension.class, objectFactory);
xcodeProject = xcode.getProject();
final GenerateXcodeWorkspaceFileTask workspaceTask = createWorkspaceTask(project, xcode.getWorkspace());
lifecycleTask.configure(dependsOn(workspaceTask));
addWorkspace(xcode.getWorkspace());
} else {
DefaultXcodeExtension xcode = (DefaultXcodeExtension) project.getExtensions().create(XcodeExtension.class, "xcode", DefaultXcodeExtension.class, objectFactory);
xcodeProject = xcode.getProject();
}
xcodeProject.setLocationDir(project.file(project.getName() + ".xcodeproj"));
GenerateXcodeProjectFileTask projectTask = createProjectTask((ProjectInternal) project);
lifecycleTask.configure(dependsOn(projectTask));
// Bridge tasks are created lazily by name when Xcode invokes Gradle.
project.getTasks().addRule("Xcode bridge tasks begin with _xcode. Do not call these directly.", new XcodeBridge(xcodeProject, project));
configureForSwiftPlugin(project);
configureForCppPlugin(project);
includeBuildFilesInProject(project);
configureXcodeCleanTask(project);
}
/**
 * Adds the project's build script to the generated project's root group so
 * it is visible/editable inside Xcode.
 */
private void includeBuildFilesInProject(Project project) {
// TODO: Add other build like files `build.gradle.kts`, `settings.gradle(.kts)`, other `.gradle`, `gradle.properties`
if (project.getBuildFile().exists()) {
xcodeProject.getGroups().getRoot().from(project.getBuildFile());
}
}
/**
 * Creates a task that deletes the generated .xcodeproj (and the .xcworkspace
 * for the root project) and hooks it into the IDE clean lifecycle.
 */
private void configureXcodeCleanTask(Project project) {
Delete cleanTask = project.getTasks().create("cleanXcodeProject", Delete.class);
cleanTask.delete(xcodeProject.getLocationDir());
if (isRoot()) {
cleanTask.delete(project.file(project.getName() + ".xcworkspace"));
}
getCleanTask().configure(Actions.composite(withDescription("Cleans XCode project files (xcodeproj)"), dependsOn(cleanTask)));
}
/**
 * Wires the tasks that generate the .xcodeproj package (workspace settings
 * plus project.pbxproj) and registers the project with the IDE artifact
 * registry so other projects can reference it.
 */
private GenerateXcodeProjectFileTask createProjectTask(final ProjectInternal project) {
File xcodeProjectPackageDir = xcodeProject.getLocationDir();
GenerateWorkspaceSettingsFileTask workspaceSettingsFileTask = project.getTasks().create("xcodeProjectWorkspaceSettings", GenerateWorkspaceSettingsFileTask.class);
workspaceSettingsFileTask.setOutputFile(new File(xcodeProjectPackageDir, "project.xcworkspace/xcshareddata/WorkspaceSettings.xcsettings"));
GenerateXcodeProjectFileTask projectFileTask = project.getTasks().create("xcodeProject", GenerateXcodeProjectFileTask.class);
projectFileTask.dependsOn(workspaceSettingsFileTask);
projectFileTask.dependsOn(xcodeProject.getTaskDependencies());
projectFileTask.dependsOn(project.getTasks().withType(GenerateSchemeFileTask.class));
projectFileTask.setXcodeProject(xcodeProject);
projectFileTask.setOutputFile(new File(xcodeProjectPackageDir, "project.pbxproj"));
artifactRegistry.registerIdeProject(new XcodeProjectMetadata(xcodeProject, projectFileTask));
return projectFileTask;
}
/**
 * Wires the tasks that generate the root .xcworkspace package, referencing
 * the .xcodeproj of every registered project.
 */
private GenerateXcodeWorkspaceFileTask createWorkspaceTask(Project project, DefaultXcodeWorkspace workspace) {
File xcodeWorkspacePackageDir = project.file(project.getName() + ".xcworkspace");
workspace.getLocation().set(xcodeWorkspacePackageDir);
GenerateWorkspaceSettingsFileTask workspaceSettingsFileTask = project.getTasks().create("xcodeWorkspaceWorkspaceSettings", GenerateWorkspaceSettingsFileTask.class);
workspaceSettingsFileTask.setOutputFile(new File(xcodeWorkspacePackageDir, "xcshareddata/WorkspaceSettings.xcsettings"));
GenerateXcodeWorkspaceFileTask workspaceFileTask = project.getTasks().create("xcodeWorkspace", GenerateXcodeWorkspaceFileTask.class);
workspaceFileTask.dependsOn(workspaceSettingsFileTask);
workspaceFileTask.setOutputFile(new File(xcodeWorkspacePackageDir, "contents.xcworkspacedata"));
workspaceFileTask.setXcodeProjectLocations(artifactRegistry.getIdeProjectFiles(XcodeProjectMetadata.class));
return workspaceFileTask;
}
/**
 * Returns the task path template invoked by Xcode; the ${...} placeholders
 * are substituted by Xcode's build environment, not by Gradle.
 */
private String getBridgeTaskPath(Project project) {
String projectPath = "";
if (!isRoot()) {
projectPath = project.getPath();
}
return projectPath + ":_xcode__${ACTION}_${PRODUCT_NAME}_${CONFIGURATION}";
}
/**
 * Reacts to the Swift application/library/XCTest plugins being applied by
 * configuring matching Xcode targets.
 */
private void configureForSwiftPlugin(final Project project) {
project.getPlugins().withType(SwiftApplicationPlugin.class, new Action<SwiftApplicationPlugin>() {
@Override
public void execute(SwiftApplicationPlugin plugin) {
configureXcodeForSwift(project);
}
});
project.getPlugins().withType(SwiftLibraryPlugin.class, new Action<SwiftLibraryPlugin>() {
@Override
public void execute(SwiftLibraryPlugin plugin) {
configureXcodeForSwift(project);
}
});
project.getPlugins().withType(XCTestConventionPlugin.class, new Action<XCTestConventionPlugin>() {
@Override
public void execute(XCTestConventionPlugin plugin) {
configureXcodeForXCTest(project);
}
});
}
/**
 * Creates a unit-test Xcode target for the project's XCTest suite. Deferred
 * to afterEvaluate so the component's configuration is complete.
 */
private void configureXcodeForXCTest(final Project project) {
project.afterEvaluate(new Action<Project>() {
@Override
public void execute(Project project) {
SwiftXCTestSuite component = project.getExtensions().getByType(SwiftXCTestSuite.class);
FileCollection sources = component.getSwiftSource();
xcodeProject.getGroups().getTests().from(sources);
String targetName = component.getModule().get();
final XcodeTarget target = newTarget(targetName, component.getModule().get(), toGradleCommand(project), getBridgeTaskPath(project), sources);
target.getSwiftSourceCompatibility().convention(component.getSourceCompatibility());
if (component.getTestBinary().isPresent()) {
// The same install directory is registered for both configurations.
target.addBinary(DefaultXcodeProject.BUILD_DEBUG, component.getTestBinary().get().getInstallDirectory(), component.getTestBinary().get().getTargetMachine().getArchitecture().getName());
target.addBinary(DefaultXcodeProject.BUILD_RELEASE, component.getTestBinary().get().getInstallDirectory(), component.getTestBinary().get().getTargetMachine().getArchitecture().getName());
target.setProductType(PBXTarget.ProductType.UNIT_TEST);
target.getCompileModules().from(component.getTestBinary().get().getCompileModules());
target.addTaskDependency(filterArtifactsFromImplicitBuilds(((DefaultSwiftBinary) component.getTestBinary().get()).getImportPathConfiguration()).getBuildDependencies());
}
component.getBinaries().whenElementFinalized(new Action<SwiftBinary>() {
@Override
public void execute(SwiftBinary swiftBinary) {
target.getSwiftSourceCompatibility().set(swiftBinary.getTargetPlatform().getSourceCompatibility());
}
});
xcodeProject.addTarget(target);
}
});
}
/**
 * Restricts the configuration's incoming artifacts to those NOT produced by
 * projects already present in the workspace (see isSourceDependency()).
 */
private FileCollection filterArtifactsFromImplicitBuilds(Configuration configuration) {
return configuration.getIncoming().artifactView(fromSourceDependency()).getArtifacts().getArtifactFiles();
}
/**
 * Creates the Xcode target for the 'main' Swift component, registering one
 * binary per build configuration as binaries are finalized.
 */
private void configureXcodeForSwift(final Project project) {
project.afterEvaluate(new Action<Project>() {
@Override
public void execute(final Project project) {
// TODO: Assumes there's a single 'main' Swift component
final ProductionSwiftComponent component = project.getComponents().withType(ProductionSwiftComponent.class).getByName("main");
FileCollection sources = component.getSwiftSource();
xcodeProject.getGroups().getSources().from(sources);
// TODO - should use the _install_ task for an executable
final String targetName = component.getModule().get();
final XcodeTarget target = newTarget(targetName, component.getModule().get(), toGradleCommand(project), getBridgeTaskPath(project), sources);
target.getDefaultConfigurationName().set(component.getDevelopmentBinary().map(devBinary -> toBuildConfigurationName(component, devBinary)));
component.getBinaries().whenElementFinalized(new Action<SwiftBinary>() {
@Override
public void execute(SwiftBinary swiftBinary) {
// Pick the product type and output file from the binary's concrete kind.
if (swiftBinary instanceof SwiftExecutable) {
target.addBinary(toBuildConfigurationName(component, swiftBinary), ((SwiftExecutable) swiftBinary).getDebuggerExecutableFile(), swiftBinary.getTargetMachine().getArchitecture().getName());
target.setProductType(PBXTarget.ProductType.TOOL);
} else if (swiftBinary instanceof SwiftSharedLibrary) {
target.addBinary(toBuildConfigurationName(component, swiftBinary), ((SwiftSharedLibrary) swiftBinary).getRuntimeFile(), swiftBinary.getTargetMachine().getArchitecture().getName());
target.setProductType(PBXTarget.ProductType.DYNAMIC_LIBRARY);
} else if (swiftBinary instanceof SwiftStaticLibrary) {
target.addBinary(toBuildConfigurationName(component, swiftBinary), ((SwiftStaticLibrary) swiftBinary).getLinkFile(), swiftBinary.getTargetMachine().getArchitecture().getName());
target.setProductType(PBXTarget.ProductType.STATIC_LIBRARY);
}
target.getSwiftSourceCompatibility().set(swiftBinary.getTargetPlatform().getSourceCompatibility());
// Only the development binary contributes modules, deps and a scheme.
if (swiftBinary == component.getDevelopmentBinary().get()) {
target.getCompileModules().from(component.getDevelopmentBinary().get().getCompileModules());
target.addTaskDependency(filterArtifactsFromImplicitBuilds(((DefaultSwiftBinary) component.getDevelopmentBinary().get()).getImportPathConfiguration()).getBuildDependencies());
createSchemeTask(project.getTasks(), targetName, xcodeProject);
}
}
});
xcodeProject.addTarget(target);
}
});
}
/**
 * Derives the Xcode build-configuration name from a binary's name by
 * stripping the component name and the Shared/Static linkage qualifier
 * (e.g. "mainDebugShared" -> "Debug").
 */
private String toBuildConfigurationName(SoftwareComponent component, SoftwareComponent binary) {
String result = binary.getName().replace(component.getName(), "");
if (binary instanceof SwiftSharedLibrary || binary instanceof CppSharedLibrary) {
return result.replace("Shared", "");
} else if (binary instanceof SwiftStaticLibrary || binary instanceof CppStaticLibrary) {
return result.replace("Static", "");
}
return result;
}
/**
 * Reacts to the C++ application/library plugins being applied by
 * configuring matching Xcode targets.
 */
private void configureForCppPlugin(final Project project) {
project.getPlugins().withType(CppApplicationPlugin.class, new Action<CppApplicationPlugin>() {
@Override
public void execute(CppApplicationPlugin plugin) {
configureXcodeForCpp(project);
}
});
project.getPlugins().withType(CppLibraryPlugin.class, new Action<CppLibraryPlugin>() {
@Override
public void execute(CppLibraryPlugin plugin) {
configureXcodeForCpp(project);
}
});
}
/**
 * Creates the Xcode target for the 'main' C++ component, including header
 * search paths and one binary per build configuration.
 */
private void configureXcodeForCpp(Project project) {
project.afterEvaluate(new Action<Project>() {
@Override
public void execute(final Project project) {
// TODO: Assumes there's a single 'main' C++ component
final ProductionCppComponent component = project.getComponents().withType(ProductionCppComponent.class).getByName("main");
FileCollection sources = component.getCppSource();
xcodeProject.getGroups().getSources().from(sources);
FileCollection headers = component.getHeaderFiles();
xcodeProject.getGroups().getHeaders().from(headers);
// TODO - should use the _install_ task for an executable
final String targetName = StringUtils.capitalize(component.getBaseName().get());
final XcodeTarget target = newTarget(targetName, targetName, toGradleCommand(project), getBridgeTaskPath(project), sources);
target.getDefaultConfigurationName().set(component.getDevelopmentBinary().map(devBinary -> toBuildConfigurationName(component, devBinary)));
component.getBinaries().whenElementFinalized(new Action<CppBinary>() {
@Override
public void execute(CppBinary cppBinary) {
// Pick the product type and output file from the binary's concrete kind.
if (cppBinary instanceof CppExecutable) {
target.addBinary(toBuildConfigurationName(component, cppBinary), ((CppExecutable) cppBinary).getDebuggerExecutableFile(), cppBinary.getTargetMachine().getArchitecture().getName());
target.setProductType(PBXTarget.ProductType.TOOL);
} else if (cppBinary instanceof CppSharedLibrary) {
target.addBinary(toBuildConfigurationName(component, cppBinary), ((CppSharedLibrary) cppBinary).getRuntimeFile(), cppBinary.getTargetMachine().getArchitecture().getName());
target.setProductType(PBXTarget.ProductType.DYNAMIC_LIBRARY);
} else if (cppBinary instanceof CppStaticLibrary) {
target.addBinary(toBuildConfigurationName(component, cppBinary), ((CppStaticLibrary) cppBinary).getLinkFile(), cppBinary.getTargetMachine().getArchitecture().getName());
target.setProductType(PBXTarget.ProductType.STATIC_LIBRARY);
}
// Only the development binary contributes include paths, deps and a scheme.
if (cppBinary == component.getDevelopmentBinary().get()) {
target.getHeaderSearchPaths().from(component.getDevelopmentBinary().get().getCompileIncludePath());
target.getTaskDependencies().add(filterArtifactsFromImplicitBuilds(((DefaultCppBinary) component.getDevelopmentBinary().get()).getIncludePathConfiguration()).getBuildDependencies());
createSchemeTask(project.getTasks(), targetName, xcodeProject);
}
}
});
target.getHeaderSearchPaths().from(component.getPrivateHeaderDirs());
if (component instanceof CppLibrary) {
target.getHeaderSearchPaths().from(((CppLibrary) component).getPublicHeaderDirs());
}
xcodeProject.addTarget(target);
}
});
}
/**
 * Creates (or reuses, via maybeCreate) the task that writes the shared
 * .xcscheme file for the given target name.
 */
private static GenerateSchemeFileTask createSchemeTask(TaskContainer tasks, String schemeName, DefaultXcodeProject xcodeProject) {
// TODO - capitalise the target name in the task name
// TODO - don't create a launch target for a library
String name = "xcodeScheme";
GenerateSchemeFileTask schemeFileTask = tasks.maybeCreate(name, GenerateSchemeFileTask.class);
schemeFileTask.setXcodeProject(xcodeProject);
schemeFileTask.setOutputFile(new File(xcodeProject.getLocationDir(), "xcshareddata/xcschemes/" + schemeName + ".xcscheme"));
return schemeFileTask;
}
/**
 * Instantiates an XcodeTarget backed by a PBXLegacyTarget (a target that
 * shells out to Gradle) with a generated stable id.
 */
private XcodeTarget newTarget(String name, String productName, String gradleCommand, String taskName, FileCollection sources) {
String id = gidGenerator.generateGid("PBXLegacyTarget", name.hashCode());
XcodeTarget target = objectFactory.newInstance(XcodeTarget.class, name, id);
target.setTaskName(taskName);
target.setGradleCommand(gradleCommand);
target.setProductName(productName);
target.getSources().setFrom(sources);
return target;
}
/**
 * Task rule that materializes the "_xcode..." bridge tasks Xcode invokes,
 * mapping the Xcode action (clean/build) onto Gradle tasks.
 */
private static class XcodeBridge implements Action<String> {
private final DefaultXcodeProject xcodeProject;
private final Project project;
// Reads the ACTION/PRODUCT_NAME/CONFIGURATION properties Xcode passes in.
private final XcodePropertyAdapter xcodePropertyAdapter;
XcodeBridge(DefaultXcodeProject xcodeProject, Project project) {
this.xcodeProject = xcodeProject;
this.project = project;
this.xcodePropertyAdapter = new XcodePropertyAdapter(project);
}
@Override
public void execute(String taskName) {
if (taskName.startsWith("_xcode")) {
Task bridgeTask = project.getTasks().create(taskName);
String action = xcodePropertyAdapter.getAction();
if (action.equals("clean")) {
bridgeTask.dependsOn("clean");
} else if ("".equals(action) || "build".equals(action)) {
// An empty action is treated the same as an explicit "build".
final XcodeTarget target = findXcodeTarget();
if (target.isUnitTest()) {
bridgeTestExecution(bridgeTask, target);
} else {
bridgeProductBuild(bridgeTask, target);
}
} else {
throw new GradleException("Unrecognized bridge action from Xcode '" + action + "'");
}
}
}
/**
 * Looks up the XcodeTarget whose product name matches the PRODUCT_NAME
 * passed by Xcode; fails if the configuration is stale.
 */
private XcodeTarget findXcodeTarget() {
final String productName = xcodePropertyAdapter.getProductName();
final XcodeTarget target = CollectionUtils.findFirst(xcodeProject.getTargets(), new Spec<XcodeTarget>() {
@Override
public boolean isSatisfiedBy(XcodeTarget target) {
return target.getProductName().equals(productName);
}
});
if (target == null) {
throw new GradleException("Unknown Xcode target '" + productName + "', do you need to re-generate Xcode configuration?");
}
return target;
}
/**
 * Wires the bridge task to build the output file of the binary matching
 * the requested build configuration.
 * NOTE(review): .findFirst().get() throws NoSuchElementException when no
 * binary matches the configuration — confirm whether that is acceptable.
 */
private void bridgeProductBuild(Task bridgeTask, XcodeTarget target) {
// Library or executable
final String configuration = xcodePropertyAdapter.getConfiguration();
bridgeTask.dependsOn(target.getBinaries().stream().filter(it -> it.getBuildConfigurationName().equals(configuration)).findFirst().get().getOutputFile());
}
/**
 * Wires the bridge task for a test bundle: syncs the built bundle into
 * Xcode's BUILT_PRODUCTS_DIR so Xcode can discover the tests.
 */
private void bridgeTestExecution(Task bridgeTask, final XcodeTarget target) {
// XCTest executable
// Sync the binary to the BUILT_PRODUCTS_DIR, otherwise Xcode won't find any tests
final String builtProductsPath = xcodePropertyAdapter.getBuiltProductsDir();
final Sync syncTask = project.getTasks().create("syncBundleToXcodeBuiltProductDir", Sync.class, new Action<Sync>() {
@Override
public void execute(Sync task) {
task.from(target.getDebugOutputFile());
task.into(builtProductsPath);
}
});
bridgeTask.dependsOn(syncTask);
}
}
/**
 * Artifact view configuration applying the source-dependency component filter.
 */
private Action<ArtifactView.ViewConfiguration> fromSourceDependency() {
return new Action<ArtifactView.ViewConfiguration>() {
@Override
public void execute(ArtifactView.ViewConfiguration viewConfiguration) {
viewConfiguration.componentFilter(isSourceDependency());
}
};
}
/**
 * Matches project components that are NOT registered in the IDE artifact
 * registry, i.e. projects outside the generated workspace that must be
 * consumed as pre-built binaries.
 */
private Spec<ComponentIdentifier> isSourceDependency() {
return new Spec<ComponentIdentifier>() {
@Override
public boolean isSatisfiedBy(ComponentIdentifier id) {
if (id instanceof ProjectComponentIdentifier) {
// Include as binary when the target project is not included in the workspace
return artifactRegistry.getIdeProject(XcodeProjectMetadata.class, (ProjectComponentIdentifier) id) == null;
}
return false;
}
};
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.catalina.ha.authenticator;
import java.security.Principal;
import org.apache.catalina.Container;
import org.apache.catalina.Host;
import org.apache.catalina.LifecycleException;
import org.apache.catalina.Session;
import org.apache.catalina.SessionListener;
import org.apache.catalina.authenticator.SingleSignOn;
import org.apache.catalina.authenticator.SingleSignOnEntry;
import org.apache.catalina.ha.CatalinaCluster;
import org.apache.catalina.ha.ClusterValve;
import org.apache.catalina.tribes.Channel;
import org.apache.catalina.tribes.tipis.AbstractReplicatedMap.MapOwner;
import org.apache.catalina.tribes.tipis.ReplicatedMap;
import org.apache.tomcat.util.ExceptionUtils;
import org.apache.tomcat.util.res.StringManager;
/**
 * A <strong>Valve</strong> that supports a "single sign on" user experience on
 * each node of a cluster, where the security identity of a user who successfully
 * authenticates to one web application is propagated to other web applications and
 * to other cluster nodes in the same security domain. For successful use, the following
* requirements must be met:
* <ul>
* <li>This Valve must be configured on the Container that represents a
* virtual host (typically an implementation of <code>Host</code>).</li>
* <li>The <code>Realm</code> that contains the shared user and role
* information must be configured on the same Container (or a higher
* one), and not overridden at the web application level.</li>
* <li>The web applications themselves must use one of the standard
* Authenticators found in the
* <code>org.apache.catalina.authenticator</code> package.</li>
* </ul>
*
* @author Fabien Carrion
*/
public class ClusterSingleSignOn extends SingleSignOn implements ClusterValve, MapOwner {
private static final StringManager sm = StringManager.getManager(ClusterSingleSignOn.class);
// -------------------------------------------------------------- Properties
// Cluster this valve replicates SSO entries across; resolved from the Host
// in startInternal() when not set explicitly.
private CatalinaCluster cluster = null;
@Override
public CatalinaCluster getCluster() { return cluster; }
@Override
public void setCluster(CatalinaCluster cluster) {
this.cluster = cluster;
}
// Timeout (ms) for RPC calls made by the replicated map.
private long rpcTimeout = 15000;
public long getRpcTimeout() {
return rpcTimeout;
}
public void setRpcTimeout(long rpcTimeout) {
this.rpcTimeout = rpcTimeout;
}
// Channel send options used when replicating map changes; defaults to
// synchronized + acknowledged sends.
private int mapSendOptions =
Channel.SEND_OPTIONS_SYNCHRONIZED_ACK | Channel.SEND_OPTIONS_USE_ACK;
public int getMapSendOptions() {
return mapSendOptions;
}
public void setMapSendOptions(int mapSendOptions) {
this.mapSendOptions = mapSendOptions;
}
// Whether ReplicatedMap construction should fail hard if the initial state
// transfer fails at startup.
private boolean terminateOnStartFailure = false;
public boolean getTerminateOnStartFailure() {
return terminateOnStartFailure;
}
public void setTerminateOnStartFailure(boolean terminateOnStartFailure) {
this.terminateOnStartFailure = terminateOnStartFailure;
}
// Timeout (ms) for acquiring access to a replicated map entry.
private long accessTimeout = 5000;
public long getAccessTimeout() {
return accessTimeout;
}
public void setAccessTimeout(long accessTimeout) {
this.accessTimeout = accessTimeout;
}
// ---------------------------------------------------- SingleSignOn Methods
/**
 * Associates a session with an SSO entry and, on success, replicates the
 * updated entry to the other cluster nodes. The cast is safe because
 * startInternal() installs a ReplicatedMap as the cache.
 */
@Override
protected boolean associate(String ssoId, Session session) {
boolean result = super.associate(ssoId, session);
if (result) {
((ReplicatedMap<String,SingleSignOnEntry>) cache).replicate(ssoId, true);
}
return result;
}
/**
 * Updates the security information of an SSO entry and, on success,
 * replicates the updated entry to the other cluster nodes.
 */
@Override
protected boolean update(String ssoId, Principal principal, String authType,
String username, String password) {
boolean result = super.update(ssoId, principal, authType, username, password);
if (result) {
((ReplicatedMap<String,SingleSignOnEntry>) cache).replicate(ssoId, true);
}
return result;
}
@Override
protected SessionListener getSessionListener(String ssoId) {
return new ClusterSingleSignOnListener(ssoId);
}
// -------------------------------------------------------- MapOwner Methods
@Override
public void objectMadePrimary(Object key, Object value) {
// NO-OP
}
// ------------------------------------------------------- Lifecycle Methods
/**
 * Start this component and implement the requirements
 * of {@link org.apache.catalina.util.LifecycleBase#startInternal()}.
 *
 * @exception LifecycleException if this component detects a fatal error
 * that prevents this component from being used
 */
@Override
protected synchronized void startInternal() throws LifecycleException {
// Load the cluster component, if any
try {
if(cluster == null) {
// Fall back to the cluster configured on the containing Host.
Container host = getContainer();
if(host instanceof Host) {
if(host.getCluster() instanceof CatalinaCluster) {
setCluster((CatalinaCluster) host.getCluster());
}
}
}
if (cluster == null) {
throw new LifecycleException(sm.getString("clusterSingleSignOn.nocluster"));
}
// Replace the local SSO cache with a cluster-replicated map so entries
// are shared by every node; external class loaders deserialize entries.
ClassLoader[] cls = new ClassLoader[] { this.getClass().getClassLoader() };
ReplicatedMap<String,SingleSignOnEntry> cache = new ReplicatedMap<>(
this, cluster.getChannel(), rpcTimeout, cluster.getClusterName() + "-SSO-cache",
cls, terminateOnStartFailure);
cache.setChannelSendOptions(mapSendOptions);
cache.setAccessTimeout(accessTimeout);
this.cache = cache;
} catch (Throwable t) {
ExceptionUtils.handleThrowable(t);
throw new LifecycleException(sm.getString("clusterSingleSignOn.clusterLoad.fail"), t);
}
super.startInternal();
}
/**
 * Stop this component and implement the requirements
 * of {@link org.apache.catalina.util.LifecycleBase#stopInternal()}.
 *
 * @exception LifecycleException if this component detects a fatal error
 * that prevents this component from being used
 */
@Override
protected synchronized void stopInternal() throws LifecycleException {
super.stopInternal();
if (getCluster() != null) {
// Detach this node from the replicated map without clearing peers.
((ReplicatedMap<?,?>) cache).breakdown();
}
}
}
| |
/*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* TODO Remove all the duplicated code, it's there for convenience right now.
*/
package com.doubtech.universalremote.widget;
import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.animation.ObjectAnimator;
import android.animation.TypeEvaluator;
import android.animation.ValueAnimator;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.Rect;
import android.graphics.drawable.BitmapDrawable;
import android.util.AttributeSet;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewTreeObserver;
import android.widget.Adapter;
import android.widget.AdapterView;
import android.widget.BaseAdapter;
/**
* The dynamic listview is an extension of listview that supports cell dragging
* and swapping.
*
* This layout is in charge of positioning the hover cell in the correct location
* on the screen in response to user touch events. It uses the position of the
* hover cell to determine when two cells should be swapped. If two cells should
* be swapped, all the corresponding data set and layout changes are handled here.
*
* If no cell is selected, all the touch events are passed down to the listview
* and behave normally. If one of the items in the listview experiences a
* long press event, the contents of its current visible state are captured as
* a bitmap and its visibility is set to INVISIBLE. A hover cell is then created and
* added to this layout as an overlaying BitmapDrawable above the listview. Once the
* hover cell is translated some distance to signify an item swap, a data set change
* accompanied by animation takes place. When the user releases the hover cell,
* it animates into its corresponding position in the listview.
*
* When the hover cell is either above or below the bounds of the listview, this
* listview also scrolls on its own so as to reveal additional content.
*/
public class DynamicListView extends TwoWayListView {
/**
 * Adapters used with this list must implement this interface so the list
 * can exchange two items in the underlying data set when the hover cell
 * crosses a neighbor.
 */
public interface ISwappableAdapter {
void swap(int a, int b);
}
// Base edge auto-scroll amount (scaled by density in init()), swap animation
// duration in ms, and the stroke width of the hover cell's highlight border.
private final int SMOOTH_SCROLL_AMOUNT_AT_EDGE = 15;
private final int MOVE_DURATION = 150;
private final int LINE_THICKNESS = 15;
// Coordinates of the most recent MOVE event and of the initial DOWN event;
// their difference drives the hover cell translation.
private int mLastEventY = -1;
private int mLastEventX = -1;
private int mDownY = -1;
private int mDownX = -1;
// Accumulated offsets applied across cell swaps so the hover cell keeps
// tracking the finger after the data set shifts underneath it.
private int mTotalOffsetX = 0;
private int mTotalOffsetY = 0;
// True while a cell is being dragged / while the list is auto-scrolling
// because the hover cell reached an edge.
private boolean mCellIsMobile = false;
private boolean mIsMobileScrolling = false;
// Effective edge auto-scroll step in pixels, computed in init().
private int mSmoothScrollAmountAtEdge = 20;
private final int INVALID_ID = -1;
// Stable adapter item ids of the dragged cell and its current neighbors;
// INVALID_ID when no drag is active or the neighbor does not exist.
private long mAboveItemId = INVALID_ID;
private long mMobileItemId = INVALID_ID;
private long mBelowItemId = INVALID_ID;
// Snapshot of the dragged cell drawn above the list, plus its current and
// original on-screen bounds.
private BitmapDrawable mHoverCell;
private Rect mHoverCellCurrentBounds;
private Rect mHoverCellOriginalBounds;
private final int INVALID_POINTER_ID = -1;
// Pointer id of the finger that started the drag (multi-touch safety).
private int mActivePointerId = INVALID_POINTER_ID;
// Set when the finger lifted while an edge auto-scroll was still running;
// the settle animation is deferred until the scroll goes idle.
private boolean mIsWaitingForScrollFinish = false;
private int mScrollState = OnScrollListener.SCROLL_STATE_IDLE;
public DynamicListView(Context context) {
super(context);
init(context);
}
public DynamicListView(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
init(context);
}
public DynamicListView(Context context, AttributeSet attrs) {
super(context, attrs);
init(context);
}
/** Wires up the long-click and scroll listeners and scales the edge auto-scroll step. */
public void init(Context context) {
setOnItemLongClickListener(mOnItemLongClickListener);
setOnScrollListener(mScrollListener);
DisplayMetrics metrics = context.getResources().getDisplayMetrics();
// NOTE(review): dividing by density converts px -> dp, so the scroll step
// shrinks on high-density screens; the upstream DevBytes sample does the
// same, but confirm this is the intended direction of the conversion.
mSmoothScrollAmountAtEdge = (int)(SMOOTH_SCROLL_AMOUNT_AT_EDGE / metrics.density);
}
/**
 * Listens for long clicks on any items in the listview. When a cell has
 * been selected, the hover cell is created and set up.
 */
private AdapterView.OnItemLongClickListener mOnItemLongClickListener =
new AdapterView.OnItemLongClickListener() {
public boolean onItemLongClick(AdapterView<?> arg0, View arg1, int pos, long id) {
// Only start a drag when the adapter knows how to swap items.
if (getAdapter() instanceof ISwappableAdapter) {
mTotalOffsetX = 0;
mTotalOffsetY = 0;
int position = pointToPosition(mDownX, mDownY);
int itemNum = position - getFirstVisiblePosition();
View selectedView = getChildAt(itemNum);
mMobileItemId = getAdapter().getItemId(position);
// Replace the live cell with an overlay bitmap and hide the original.
mHoverCell = getAndAddHoverView(selectedView);
selectedView.setVisibility(INVISIBLE);
mCellIsMobile = true;
updateNeighborViewsForID(mMobileItemId);
return true;
}
return false;
}
};
/**
 * Creates the hover cell with the appropriate bitmap and of appropriate
 * size. The hover cell's BitmapDrawable is drawn on top of the bitmap every
 * single time an invalidate call is made.
 */
private BitmapDrawable getAndAddHoverView(View v) {
int w = v.getWidth();
int h = v.getHeight();
int top = v.getTop();
int left = v.getLeft();
Bitmap b = getBitmapWithBorder(v);
BitmapDrawable drawable = new BitmapDrawable(getResources(), b);
// Remember where the cell started so deltas can be applied relative to it.
mHoverCellOriginalBounds = new Rect(left, top, left + w, top + h);
mHoverCellCurrentBounds = new Rect(mHoverCellOriginalBounds);
drawable.setBounds(mHoverCellCurrentBounds);
return drawable;
}
/** Draws a highlight border (holo blue) over the screenshot of the view passed in. */
private Bitmap getBitmapWithBorder(View v) {
Bitmap bitmap = getBitmapFromView(v);
Canvas can = new Canvas(bitmap);
Rect rect = new Rect(0, 0, bitmap.getWidth(), bitmap.getHeight());
Paint paint = new Paint();
paint.setStyle(Paint.Style.STROKE);
paint.setStrokeWidth(LINE_THICKNESS);
paint.setColor(getContext().getResources().getColor(android.R.color.holo_blue_bright));
can.drawBitmap(bitmap, 0, 0, null);
can.drawRect(rect, paint);
return bitmap;
}
/** Returns a bitmap showing a screenshot of the view passed in. */
private Bitmap getBitmapFromView(View v) {
Bitmap bitmap = Bitmap.createBitmap(v.getWidth(), v.getHeight(), Bitmap.Config.ARGB_8888);
Canvas canvas = new Canvas (bitmap);
v.draw(canvas);
return bitmap;
}
/**
 * Stores a reference to the views above and below the item currently
 * corresponding to the hover cell. It is important to note that if this
 * item is either at the top or bottom of the list, mAboveItemId or mBelowItemId
 * may be invalid.
 */
private void updateNeighborViewsForID(long itemID) {
int position = getPositionForID(itemID);
Adapter adapter = getAdapter();
// NOTE(review): position - 1 / position + 1 can be outside the adapter's
// valid range at the list edges; this relies on getItemId() tolerating
// out-of-range positions — verify against the adapters used here.
mAboveItemId = adapter.getItemId(position - 1);
mBelowItemId = adapter.getItemId(position + 1);
}
/** Retrieves the view in the list corresponding to itemID, or null if it is not on screen. */
public View getViewForID (long itemID) {
int firstVisiblePosition = getFirstVisiblePosition();
Adapter adapter = getAdapter();
// Scan only the currently-attached children; off-screen items have no view.
for (int i = 0; i < getChildCount(); i++) {
View v = getChildAt(i);
int position = firstVisiblePosition + i;
long id = adapter.getItemId(position);
if (id == itemID) {
return v;
}
}
return null;
}
/** Retrieves the position in the list corresponding to itemID, or -1 if it is not on screen. */
public int getPositionForID (long itemID) {
View v = getViewForID(itemID);
if (v == null) {
return -1;
} else {
return getPositionForView(v);
}
}
/**
 * dispatchDraw gets invoked when all the child views are about to be drawn.
 * By overriding this method, the hover cell (BitmapDrawable) can be drawn
 * over the listview's items whenever the listview is redrawn.
 */
@Override
protected void dispatchDraw(Canvas canvas) {
super.dispatchDraw(canvas);
if (mHoverCell != null) {
mHoverCell.draw(canvas);
}
}
@Override
public boolean onTouchEvent (MotionEvent event) {
switch (event.getAction() & MotionEvent.ACTION_MASK) {
case MotionEvent.ACTION_DOWN:
// Record the drag origin and the finger that owns the gesture.
mDownX = (int)event.getX();
mDownY = (int)event.getY();
mActivePointerId = event.getPointerId(0);
break;
case MotionEvent.ACTION_MOVE:
if (mActivePointerId == INVALID_POINTER_ID) {
break;
}
int pointerIndex = event.findPointerIndex(mActivePointerId);
mLastEventY = (int) event.getY(pointerIndex);
mLastEventX = (int) event.getX(pointerIndex);
int deltaX = mLastEventX - mDownX;
int deltaY = mLastEventY - mDownY;
if (mCellIsMobile) {
// Translate the hover cell only along the list's orientation axis.
if (isVertical()) {
mHoverCellCurrentBounds.offsetTo(mHoverCellOriginalBounds.left,
mHoverCellOriginalBounds.top + deltaY + mTotalOffsetY);
} else {
mHoverCellCurrentBounds.offsetTo(mHoverCellOriginalBounds.left + deltaX + mTotalOffsetX,
mHoverCellOriginalBounds.top);
}
mHoverCell.setBounds(mHoverCellCurrentBounds);
invalidate();
handleCellSwitch();
mIsMobileScrolling = false;
handleMobileCellScroll();
// Returning false here skips super.onTouchEvent while a drag is active.
return false;
}
break;
case MotionEvent.ACTION_UP:
touchEventsEnded();
break;
case MotionEvent.ACTION_CANCEL:
touchEventsCancelled();
break;
case MotionEvent.ACTION_POINTER_UP:
/* If a multitouch event took place and the original touch dictating
 * the movement of the hover cell has ended, then the dragging event
 * ends and the hover cell is animated to its corresponding position
 * in the listview. */
pointerIndex = (event.getAction() & MotionEvent.ACTION_POINTER_INDEX_MASK) >>
MotionEvent.ACTION_POINTER_INDEX_SHIFT;
final int pointerId = event.getPointerId(pointerIndex);
if (pointerId == mActivePointerId) {
touchEventsEnded();
}
break;
default:
break;
}
return super.onTouchEvent(event);
}
/**
 * This method determines whether the hover cell has been shifted far enough
 * to invoke a cell swap. If so, then the respective cell swap candidate is
 * determined and the data set is changed. Upon posting a notification of the
 * data set change, a layout is invoked to place the cells in the right place.
 * Using a ViewTreeObserver and a corresponding OnPreDrawListener, we can
 * offset the cell being swapped to where it previously was and then animate it to
 * its new position.
 */
private void handleCellSwitch() {
final int deltaY = mLastEventY - mDownY;
int deltaYTotal = mHoverCellOriginalBounds.top + mTotalOffsetY + deltaY;
final int deltaX = mLastEventX - mDownX;
int deltaXTotal = mHoverCellOriginalBounds.left + mTotalOffsetX + deltaX;
View belowView = getViewForID(mBelowItemId);
View mobileView = getViewForID(mMobileItemId);
View aboveView = getViewForID(mAboveItemId);
boolean isBelow;
boolean isAbove;
// A swap triggers once the hover cell's leading edge passes the neighbor's
// top/left edge along the orientation axis.
if (isVertical()) {
isBelow = (belowView != null) && (deltaYTotal > belowView.getTop());
isAbove = (aboveView != null) && (deltaYTotal < aboveView.getTop());
} else {
isBelow = (belowView != null) && (deltaXTotal > belowView.getLeft());
isAbove = (aboveView != null) && (deltaXTotal < aboveView.getLeft());
}
if (isBelow || isAbove) {
final long switchItemID = isBelow ? mBelowItemId : mAboveItemId;
View switchView = isBelow ? belowView : aboveView;
final int originalItem = getPositionForView(mobileView);
if (switchView == null) {
updateNeighborViewsForID(mMobileItemId);
return;
}
if (-1 == originalItem) {
return;
}
((ISwappableAdapter) getAdapter()).swap(originalItem, getPositionForView(switchView));
((BaseAdapter) getAdapter()).notifyDataSetChanged();
// Re-anchor the drag origin so subsequent deltas are measured from here.
if (isVertical()) {
mDownY = mLastEventY;
} else {
mDownX = mLastEventX;
}
final int switchViewStartTop = switchView.getTop();
final int switchViewStartLeft = switchView.getLeft();
mobileView.setVisibility(View.VISIBLE);
switchView.setVisibility(View.INVISIBLE);
updateNeighborViewsForID(mMobileItemId);
final ViewTreeObserver observer = getViewTreeObserver();
observer.addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() {
public boolean onPreDraw() {
observer.removeOnPreDrawListener(this);
View switchView = getViewForID(switchItemID);
ObjectAnimator animator;
// After the post-swap layout, offset the swapped view back to its
// pre-swap location and animate it into the new slot.
if (isVertical()) {
mTotalOffsetY += deltaY;
int switchViewNewTop = switchView.getTop();
int delta = switchViewStartTop - switchViewNewTop;
switchView.setTranslationY(delta);
animator = ObjectAnimator.ofFloat(switchView,
View.TRANSLATION_Y, 0);
} else {
mTotalOffsetX += deltaX;
int switchViewNewLeft = switchView.getLeft();
int delta = switchViewStartLeft - switchViewNewLeft;
switchView.setTranslationX(delta);
animator = ObjectAnimator.ofFloat(switchView,
View.TRANSLATION_X, 0);
}
animator.setDuration(MOVE_DURATION);
animator.start();
return true;
}
});
}
}
/**
 * Resets all the appropriate fields to a default state while also animating
 * the hover cell back to its correct location.
 */
private void touchEventsEnded () {
final View mobileView = getViewForID(mMobileItemId);
if (mCellIsMobile|| mIsWaitingForScrollFinish) {
mCellIsMobile = false;
mIsWaitingForScrollFinish = false;
mIsMobileScrolling = false;
mActivePointerId = INVALID_POINTER_ID;
// If the autoscroller has not completed scrolling, we need to wait for it to
// finish in order to determine the final location of where the hover cell
// should be animated to.
if (mScrollState != OnScrollListener.SCROLL_STATE_IDLE) {
mIsWaitingForScrollFinish = true;
return;
}
if (isVertical()) {
mHoverCellCurrentBounds.offsetTo(mHoverCellOriginalBounds.left, mobileView.getTop());
} else {
mHoverCellCurrentBounds.offsetTo(mobileView.getLeft(), mHoverCellOriginalBounds.top);
}
// Animate the hover bitmap's bounds onto the target cell's final location.
ObjectAnimator hoverViewAnimator = ObjectAnimator.ofObject(mHoverCell, "bounds",
sBoundEvaluator, mHoverCellCurrentBounds);
hoverViewAnimator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
@Override
public void onAnimationUpdate(ValueAnimator valueAnimator) {
invalidate();
}
});
hoverViewAnimator.addListener(new AnimatorListenerAdapter() {
@Override
public void onAnimationStart(Animator animation) {
// Disable touch while the settle animation runs.
setEnabled(false);
}
@Override
public void onAnimationEnd(Animator animation) {
mAboveItemId = INVALID_ID;
mMobileItemId = INVALID_ID;
mBelowItemId = INVALID_ID;
mobileView.setVisibility(VISIBLE);
mHoverCell = null;
setEnabled(true);
invalidate();
}
});
hoverViewAnimator.start();
} else {
touchEventsCancelled();
}
}
/**
 * Resets all the appropriate fields to a default state.
 */
private void touchEventsCancelled () {
View mobileView = getViewForID(mMobileItemId);
if (mCellIsMobile) {
mAboveItemId = INVALID_ID;
mMobileItemId = INVALID_ID;
mBelowItemId = INVALID_ID;
mobileView.setVisibility(VISIBLE);
mHoverCell = null;
invalidate();
}
mCellIsMobile = false;
mIsMobileScrolling = false;
mActivePointerId = INVALID_POINTER_ID;
}
/**
 * This TypeEvaluator is used to animate the BitmapDrawable back to its
 * final location when the user lifts his finger by modifying the
 * BitmapDrawable's bounds.
 */
private final static TypeEvaluator<Rect> sBoundEvaluator = new TypeEvaluator<Rect>() {
public Rect evaluate(float fraction, Rect startValue, Rect endValue) {
// Linearly interpolate every edge of the rect independently.
return new Rect(interpolate(startValue.left, endValue.left, fraction),
interpolate(startValue.top, endValue.top, fraction),
interpolate(startValue.right, endValue.right, fraction),
interpolate(startValue.bottom, endValue.bottom, fraction));
}
public int interpolate(int start, int end, float fraction) {
return (int)(start + fraction * (end - start));
}
};
/**
 * Determines whether this listview is in a scrolling state invoked
 * by the fact that the hover cell is out of the bounds of the listview;
 */
private void handleMobileCellScroll() {
mIsMobileScrolling = handleMobileCellScroll(mHoverCellCurrentBounds);
}
/**
 * This method is in charge of determining if the hover cell is above
 * or below the bounds of the listview. If so, the listview does an appropriate
 * upward or downward smooth scroll so as to reveal new items.
 */
public boolean handleMobileCellScroll(Rect r) {
if (isVertical()) {
int offset = computeVerticalScrollOffset();
int height = getHeight();
int extent = computeVerticalScrollExtent();
int range = computeVerticalScrollRange();
int hoverViewTop = r.top;
int hoverHeight = r.height();
// At the top edge with content above: scroll up.
if (hoverViewTop <= 0 && offset > 0) {
smoothScrollBy(-mSmoothScrollAmountAtEdge, 0);
return true;
}
// At the bottom edge with content below: scroll down.
if (hoverViewTop + hoverHeight >= height && (offset + extent) < range) {
smoothScrollBy(mSmoothScrollAmountAtEdge, 0);
return true;
}
return false;
} else {
int offset = computeHorizontalScrollOffset();
int width = getWidth();
int extent = computeHorizontalScrollExtent();
int range = computeHorizontalScrollRange();
int hoverViewLeft = r.left;
int hoverWidth = r.width();
// NOTE(review): assumes TwoWayListView.smoothScrollBy(distance, duration)
// scrolls along the current orientation axis — confirm in the base class.
if (hoverViewLeft <= 0 && offset > 0) {
smoothScrollBy(-mSmoothScrollAmountAtEdge, 0);
return true;
}
if (hoverViewLeft + hoverWidth >= width && (offset + extent) < range) {
smoothScrollBy(mSmoothScrollAmountAtEdge, 0);
return true;
}
return false;
}
}
/** True when the underlying TwoWayListView scrolls vertically. */
private boolean isVertical() {
return getOrientation() == Orientation.VERTICAL;
}
/**
 * This scroll listener is added to the listview in order to handle cell swapping
 * when the cell is either at the top or bottom edge of the listview. If the hover
 * cell is at either edge of the listview, the listview will begin scrolling. As
 * scrolling takes place, the listview continuously checks if new cells became visible
 * and determines whether they are potential candidates for a cell swap.
 */
private TwoWayListView.OnScrollListener mScrollListener = new TwoWayListView.OnScrollListener () {
private int mPreviousFirstVisibleItem = -1;
private int mPreviousVisibleItemCount = -1;
private int mCurrentFirstVisibleItem;
private int mCurrentVisibleItemCount;
private int mCurrentScrollState;
public void onScroll(TwoWayListView view, int firstVisibleItem, int visibleItemCount,
int totalItemCount) {
mCurrentFirstVisibleItem = firstVisibleItem;
mCurrentVisibleItemCount = visibleItemCount;
// On the very first callback there is no "previous" frame; seed it with
// the current values so the change checks below see no difference.
mPreviousFirstVisibleItem = (mPreviousFirstVisibleItem == -1) ? mCurrentFirstVisibleItem
: mPreviousFirstVisibleItem;
mPreviousVisibleItemCount = (mPreviousVisibleItemCount == -1) ? mCurrentVisibleItemCount
: mPreviousVisibleItemCount;
checkAndHandleFirstVisibleCellChange();
checkAndHandleLastVisibleCellChange();
mPreviousFirstVisibleItem = mCurrentFirstVisibleItem;
mPreviousVisibleItemCount = mCurrentVisibleItemCount;
}
@Override
public void onScrollStateChanged(TwoWayListView view, int scrollState) {
mCurrentScrollState = scrollState;
mScrollState = scrollState;
isScrollCompleted();
}
/**
 * This method is in charge of invoking 1 of 2 actions. Firstly, if the listview
 * is in a state of scrolling invoked by the hover cell being outside the bounds
 * of the listview, then this scrolling event is continued. Secondly, if the hover
 * cell has already been released, this invokes the animation for the hover cell
 * to return to its correct position after the listview has entered an idle scroll
 * state.
 */
private void isScrollCompleted() {
if (mCurrentVisibleItemCount > 0 && mCurrentScrollState == SCROLL_STATE_IDLE) {
if (mCellIsMobile && mIsMobileScrolling) {
handleMobileCellScroll();
} else if (mIsWaitingForScrollFinish) {
touchEventsEnded();
}
}
}
/**
 * Determines if the listview scrolled up enough to reveal a new cell at the
 * top of the list. If so, then the appropriate parameters are updated.
 */
public void checkAndHandleFirstVisibleCellChange() {
if (mCurrentFirstVisibleItem != mPreviousFirstVisibleItem) {
if (mCellIsMobile && mMobileItemId != INVALID_ID) {
updateNeighborViewsForID(mMobileItemId);
handleCellSwitch();
}
}
}
/**
 * Determines if the listview scrolled down enough to reveal a new cell at the
 * bottom of the list. If so, then the appropriate parameters are updated.
 */
public void checkAndHandleLastVisibleCellChange() {
int currentLastVisibleItem = mCurrentFirstVisibleItem + mCurrentVisibleItemCount;
int previousLastVisibleItem = mPreviousFirstVisibleItem + mPreviousVisibleItemCount;
if (currentLastVisibleItem != previousLastVisibleItem) {
if (mCellIsMobile && mMobileItemId != INVALID_ID) {
updateNeighborViewsForID(mMobileItemId);
handleCellSwitch();
}
}
}
};
}
| |
package io.grpc.testing.integration;
import static io.grpc.stub.ClientCalls.asyncUnaryCall;
import static io.grpc.stub.ClientCalls.asyncServerStreamingCall;
import static io.grpc.stub.ClientCalls.asyncClientStreamingCall;
import static io.grpc.stub.ClientCalls.asyncBidiStreamingCall;
import static io.grpc.stub.ClientCalls.blockingUnaryCall;
import static io.grpc.stub.ClientCalls.blockingServerStreamingCall;
import static io.grpc.stub.ClientCalls.futureUnaryCall;
import static io.grpc.MethodDescriptor.generateFullMethodName;
import static io.grpc.stub.ServerCalls.asyncUnaryCall;
import static io.grpc.stub.ServerCalls.asyncServerStreamingCall;
import static io.grpc.stub.ServerCalls.asyncClientStreamingCall;
import static io.grpc.stub.ServerCalls.asyncBidiStreamingCall;
@javax.annotation.Generated("by gRPC proto compiler")
public class TestServiceGrpc {
// NOTE(review): this class is emitted by the gRPC proto compiler (see the
// @Generated annotation); do not hand-edit — regenerate from the .proto.
// Private constructor: this class is a non-instantiable holder of static
// descriptors and stub factories.
private TestServiceGrpc() {}
public static final String SERVICE_NAME = "grpc.testing.TestService";
// Static method descriptors that strictly reflect the proto.
// Unary: EmptyCall(Empty) -> Empty.
@io.grpc.ExperimentalApi
public static final io.grpc.MethodDescriptor<com.google.protobuf.EmptyProtos.Empty,
com.google.protobuf.EmptyProtos.Empty> METHOD_EMPTY_CALL =
io.grpc.MethodDescriptor.create(
io.grpc.MethodDescriptor.MethodType.UNARY,
generateFullMethodName(
"grpc.testing.TestService", "EmptyCall"),
io.grpc.protobuf.ProtoUtils.marshaller(com.google.protobuf.EmptyProtos.Empty.getDefaultInstance()),
io.grpc.protobuf.ProtoUtils.marshaller(com.google.protobuf.EmptyProtos.Empty.getDefaultInstance()));
// Unary: UnaryCall(SimpleRequest) -> SimpleResponse.
@io.grpc.ExperimentalApi
public static final io.grpc.MethodDescriptor<io.grpc.testing.integration.Messages.SimpleRequest,
io.grpc.testing.integration.Messages.SimpleResponse> METHOD_UNARY_CALL =
io.grpc.MethodDescriptor.create(
io.grpc.MethodDescriptor.MethodType.UNARY,
generateFullMethodName(
"grpc.testing.TestService", "UnaryCall"),
io.grpc.protobuf.ProtoUtils.marshaller(io.grpc.testing.integration.Messages.SimpleRequest.getDefaultInstance()),
io.grpc.protobuf.ProtoUtils.marshaller(io.grpc.testing.integration.Messages.SimpleResponse.getDefaultInstance()));
// Server-streaming: one request, a stream of responses.
@io.grpc.ExperimentalApi
public static final io.grpc.MethodDescriptor<io.grpc.testing.integration.Messages.StreamingOutputCallRequest,
io.grpc.testing.integration.Messages.StreamingOutputCallResponse> METHOD_STREAMING_OUTPUT_CALL =
io.grpc.MethodDescriptor.create(
io.grpc.MethodDescriptor.MethodType.SERVER_STREAMING,
generateFullMethodName(
"grpc.testing.TestService", "StreamingOutputCall"),
io.grpc.protobuf.ProtoUtils.marshaller(io.grpc.testing.integration.Messages.StreamingOutputCallRequest.getDefaultInstance()),
io.grpc.protobuf.ProtoUtils.marshaller(io.grpc.testing.integration.Messages.StreamingOutputCallResponse.getDefaultInstance()));
// Client-streaming: a stream of requests, one response.
@io.grpc.ExperimentalApi
public static final io.grpc.MethodDescriptor<io.grpc.testing.integration.Messages.StreamingInputCallRequest,
io.grpc.testing.integration.Messages.StreamingInputCallResponse> METHOD_STREAMING_INPUT_CALL =
io.grpc.MethodDescriptor.create(
io.grpc.MethodDescriptor.MethodType.CLIENT_STREAMING,
generateFullMethodName(
"grpc.testing.TestService", "StreamingInputCall"),
io.grpc.protobuf.ProtoUtils.marshaller(io.grpc.testing.integration.Messages.StreamingInputCallRequest.getDefaultInstance()),
io.grpc.protobuf.ProtoUtils.marshaller(io.grpc.testing.integration.Messages.StreamingInputCallResponse.getDefaultInstance()));
// Bidirectional streaming: full-duplex request/response streams.
@io.grpc.ExperimentalApi
public static final io.grpc.MethodDescriptor<io.grpc.testing.integration.Messages.StreamingOutputCallRequest,
io.grpc.testing.integration.Messages.StreamingOutputCallResponse> METHOD_FULL_DUPLEX_CALL =
io.grpc.MethodDescriptor.create(
io.grpc.MethodDescriptor.MethodType.BIDI_STREAMING,
generateFullMethodName(
"grpc.testing.TestService", "FullDuplexCall"),
io.grpc.protobuf.ProtoUtils.marshaller(io.grpc.testing.integration.Messages.StreamingOutputCallRequest.getDefaultInstance()),
io.grpc.protobuf.ProtoUtils.marshaller(io.grpc.testing.integration.Messages.StreamingOutputCallResponse.getDefaultInstance()));
// Bidirectional streaming (declared BIDI, named HalfDuplexCall in the proto).
@io.grpc.ExperimentalApi
public static final io.grpc.MethodDescriptor<io.grpc.testing.integration.Messages.StreamingOutputCallRequest,
io.grpc.testing.integration.Messages.StreamingOutputCallResponse> METHOD_HALF_DUPLEX_CALL =
io.grpc.MethodDescriptor.create(
io.grpc.MethodDescriptor.MethodType.BIDI_STREAMING,
generateFullMethodName(
"grpc.testing.TestService", "HalfDuplexCall"),
io.grpc.protobuf.ProtoUtils.marshaller(io.grpc.testing.integration.Messages.StreamingOutputCallRequest.getDefaultInstance()),
io.grpc.protobuf.ProtoUtils.marshaller(io.grpc.testing.integration.Messages.StreamingOutputCallResponse.getDefaultInstance()));
/** Creates a new async stub that supports all call types for the service. */
public static TestServiceStub newStub(io.grpc.Channel channel) {
return new TestServiceStub(channel);
}
/** Creates a new blocking-style stub that supports unary and server-streaming calls. */
public static TestServiceBlockingStub newBlockingStub(
io.grpc.Channel channel) {
return new TestServiceBlockingStub(channel);
}
/** Creates a new ListenableFuture-style stub that supports unary calls. */
public static TestServiceFutureStub newFutureStub(
io.grpc.Channel channel) {
return new TestServiceFutureStub(channel);
}
/**
 * Async service interface (generated): one method per RPC declared in the
 * proto. Streaming methods exchange {@code StreamObserver}s instead of
 * returning values directly.
 */
public static interface TestService {
public void emptyCall(com.google.protobuf.EmptyProtos.Empty request,
io.grpc.stub.StreamObserver<com.google.protobuf.EmptyProtos.Empty> responseObserver);
public void unaryCall(io.grpc.testing.integration.Messages.SimpleRequest request,
io.grpc.stub.StreamObserver<io.grpc.testing.integration.Messages.SimpleResponse> responseObserver);
public void streamingOutputCall(io.grpc.testing.integration.Messages.StreamingOutputCallRequest request,
io.grpc.stub.StreamObserver<io.grpc.testing.integration.Messages.StreamingOutputCallResponse> responseObserver);
public io.grpc.stub.StreamObserver<io.grpc.testing.integration.Messages.StreamingInputCallRequest> streamingInputCall(
io.grpc.stub.StreamObserver<io.grpc.testing.integration.Messages.StreamingInputCallResponse> responseObserver);
public io.grpc.stub.StreamObserver<io.grpc.testing.integration.Messages.StreamingOutputCallRequest> fullDuplexCall(
io.grpc.stub.StreamObserver<io.grpc.testing.integration.Messages.StreamingOutputCallResponse> responseObserver);
public io.grpc.stub.StreamObserver<io.grpc.testing.integration.Messages.StreamingOutputCallRequest> halfDuplexCall(
io.grpc.stub.StreamObserver<io.grpc.testing.integration.Messages.StreamingOutputCallResponse> responseObserver);
}
/**
 * Blocking client interface (generated): only unary and server-streaming
 * RPCs, which can be expressed synchronously.
 */
public static interface TestServiceBlockingClient {
public com.google.protobuf.EmptyProtos.Empty emptyCall(com.google.protobuf.EmptyProtos.Empty request);
public io.grpc.testing.integration.Messages.SimpleResponse unaryCall(io.grpc.testing.integration.Messages.SimpleRequest request);
public java.util.Iterator<io.grpc.testing.integration.Messages.StreamingOutputCallResponse> streamingOutputCall(
io.grpc.testing.integration.Messages.StreamingOutputCallRequest request);
}
/**
 * Future client interface (generated): only unary RPCs, returned as
 * {@code ListenableFuture}s.
 */
public static interface TestServiceFutureClient {
public com.google.common.util.concurrent.ListenableFuture<com.google.protobuf.EmptyProtos.Empty> emptyCall(
com.google.protobuf.EmptyProtos.Empty request);
public com.google.common.util.concurrent.ListenableFuture<io.grpc.testing.integration.Messages.SimpleResponse> unaryCall(
io.grpc.testing.integration.Messages.SimpleRequest request);
}
/**
 * Async stub (generated): dispatches each RPC through the channel using the
 * static method descriptors and the ClientCalls helpers.
 */
public static class TestServiceStub extends io.grpc.stub.AbstractStub<TestServiceStub>
implements TestService {
private TestServiceStub(io.grpc.Channel channel) {
super(channel);
}
private TestServiceStub(io.grpc.Channel channel,
io.grpc.CallOptions callOptions) {
super(channel, callOptions);
}
@java.lang.Override
protected TestServiceStub build(io.grpc.Channel channel,
io.grpc.CallOptions callOptions) {
return new TestServiceStub(channel, callOptions);
}
@java.lang.Override
public void emptyCall(com.google.protobuf.EmptyProtos.Empty request,
io.grpc.stub.StreamObserver<com.google.protobuf.EmptyProtos.Empty> responseObserver) {
asyncUnaryCall(
getChannel().newCall(METHOD_EMPTY_CALL, getCallOptions()), request, responseObserver);
}
@java.lang.Override
public void unaryCall(io.grpc.testing.integration.Messages.SimpleRequest request,
io.grpc.stub.StreamObserver<io.grpc.testing.integration.Messages.SimpleResponse> responseObserver) {
asyncUnaryCall(
getChannel().newCall(METHOD_UNARY_CALL, getCallOptions()), request, responseObserver);
}
@java.lang.Override
public void streamingOutputCall(io.grpc.testing.integration.Messages.StreamingOutputCallRequest request,
io.grpc.stub.StreamObserver<io.grpc.testing.integration.Messages.StreamingOutputCallResponse> responseObserver) {
asyncServerStreamingCall(
getChannel().newCall(METHOD_STREAMING_OUTPUT_CALL, getCallOptions()), request, responseObserver);
}
@java.lang.Override
public io.grpc.stub.StreamObserver<io.grpc.testing.integration.Messages.StreamingInputCallRequest> streamingInputCall(
io.grpc.stub.StreamObserver<io.grpc.testing.integration.Messages.StreamingInputCallResponse> responseObserver) {
return asyncClientStreamingCall(
getChannel().newCall(METHOD_STREAMING_INPUT_CALL, getCallOptions()), responseObserver);
}
@java.lang.Override
public io.grpc.stub.StreamObserver<io.grpc.testing.integration.Messages.StreamingOutputCallRequest> fullDuplexCall(
io.grpc.stub.StreamObserver<io.grpc.testing.integration.Messages.StreamingOutputCallResponse> responseObserver) {
return asyncBidiStreamingCall(
getChannel().newCall(METHOD_FULL_DUPLEX_CALL, getCallOptions()), responseObserver);
}
@java.lang.Override
public io.grpc.stub.StreamObserver<io.grpc.testing.integration.Messages.StreamingOutputCallRequest> halfDuplexCall(
io.grpc.stub.StreamObserver<io.grpc.testing.integration.Messages.StreamingOutputCallResponse> responseObserver) {
return asyncBidiStreamingCall(
getChannel().newCall(METHOD_HALF_DUPLEX_CALL, getCallOptions()), responseObserver);
}
}
/**
 * Blocking stub (generated): synchronous wrappers over the unary and
 * server-streaming RPCs.
 */
public static class TestServiceBlockingStub extends io.grpc.stub.AbstractStub<TestServiceBlockingStub>
implements TestServiceBlockingClient {
private TestServiceBlockingStub(io.grpc.Channel channel) {
super(channel);
}
private TestServiceBlockingStub(io.grpc.Channel channel,
io.grpc.CallOptions callOptions) {
super(channel, callOptions);
}
@java.lang.Override
protected TestServiceBlockingStub build(io.grpc.Channel channel,
io.grpc.CallOptions callOptions) {
return new TestServiceBlockingStub(channel, callOptions);
}
@java.lang.Override
public com.google.protobuf.EmptyProtos.Empty emptyCall(com.google.protobuf.EmptyProtos.Empty request) {
return blockingUnaryCall(
getChannel(), METHOD_EMPTY_CALL, getCallOptions(), request);
}
@java.lang.Override
public io.grpc.testing.integration.Messages.SimpleResponse unaryCall(io.grpc.testing.integration.Messages.SimpleRequest request) {
return blockingUnaryCall(
getChannel(), METHOD_UNARY_CALL, getCallOptions(), request);
}
@java.lang.Override
public java.util.Iterator<io.grpc.testing.integration.Messages.StreamingOutputCallResponse> streamingOutputCall(
io.grpc.testing.integration.Messages.StreamingOutputCallRequest request) {
return blockingServerStreamingCall(
getChannel(), METHOD_STREAMING_OUTPUT_CALL, getCallOptions(), request);
}
}
/**
 * Future stub (generated): unary RPCs returned as ListenableFutures.
 */
public static class TestServiceFutureStub extends io.grpc.stub.AbstractStub<TestServiceFutureStub>
implements TestServiceFutureClient {
private TestServiceFutureStub(io.grpc.Channel channel) {
super(channel);
}
private TestServiceFutureStub(io.grpc.Channel channel,
io.grpc.CallOptions callOptions) {
super(channel, callOptions);
}
@java.lang.Override
protected TestServiceFutureStub build(io.grpc.Channel channel,
io.grpc.CallOptions callOptions) {
return new TestServiceFutureStub(channel, callOptions);
}
@java.lang.Override
public com.google.common.util.concurrent.ListenableFuture<com.google.protobuf.EmptyProtos.Empty> emptyCall(
com.google.protobuf.EmptyProtos.Empty request) {
return futureUnaryCall(
getChannel().newCall(METHOD_EMPTY_CALL, getCallOptions()), request);
}
@java.lang.Override
public com.google.common.util.concurrent.ListenableFuture<io.grpc.testing.integration.Messages.SimpleResponse> unaryCall(
io.grpc.testing.integration.Messages.SimpleRequest request) {
return futureUnaryCall(
getChannel().newCall(METHOD_UNARY_CALL, getCallOptions()), request);
}
}
// Numeric ids used by MethodHandlers.invoke() to dispatch to the right
// service method (generated; must stay in sync with the switch below).
private static final int METHODID_EMPTY_CALL = 0;
private static final int METHODID_UNARY_CALL = 1;
private static final int METHODID_STREAMING_OUTPUT_CALL = 2;
private static final int METHODID_STREAMING_INPUT_CALL = 3;
private static final int METHODID_FULL_DUPLEX_CALL = 4;
private static final int METHODID_HALF_DUPLEX_CALL = 5;
/**
 * Generated dispatcher: one instance per RPC, routing server calls to the
 * service implementation based on {@code methodId}. The first invoke()
 * overload serves unary/server-streaming methods; the second serves
 * client-streaming/bidi methods. Ids not valid for an overload hit the
 * AssertionError default.
 */
private static class MethodHandlers<Req, Resp> implements
io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,
io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>,
io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>,
io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> {
private final TestService serviceImpl;
private final int methodId;
public MethodHandlers(TestService serviceImpl, int methodId) {
this.serviceImpl = serviceImpl;
this.methodId = methodId;
}
@java.lang.SuppressWarnings("unchecked")
public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) {
switch (methodId) {
case METHODID_EMPTY_CALL:
serviceImpl.emptyCall((com.google.protobuf.EmptyProtos.Empty) request,
(io.grpc.stub.StreamObserver<com.google.protobuf.EmptyProtos.Empty>) responseObserver);
break;
case METHODID_UNARY_CALL:
serviceImpl.unaryCall((io.grpc.testing.integration.Messages.SimpleRequest) request,
(io.grpc.stub.StreamObserver<io.grpc.testing.integration.Messages.SimpleResponse>) responseObserver);
break;
case METHODID_STREAMING_OUTPUT_CALL:
serviceImpl.streamingOutputCall((io.grpc.testing.integration.Messages.StreamingOutputCallRequest) request,
(io.grpc.stub.StreamObserver<io.grpc.testing.integration.Messages.StreamingOutputCallResponse>) responseObserver);
break;
default:
throw new AssertionError();
}
}
@java.lang.SuppressWarnings("unchecked")
public io.grpc.stub.StreamObserver<Req> invoke(
io.grpc.stub.StreamObserver<Resp> responseObserver) {
switch (methodId) {
case METHODID_STREAMING_INPUT_CALL:
return (io.grpc.stub.StreamObserver<Req>) serviceImpl.streamingInputCall(
(io.grpc.stub.StreamObserver<io.grpc.testing.integration.Messages.StreamingInputCallResponse>) responseObserver);
case METHODID_FULL_DUPLEX_CALL:
return (io.grpc.stub.StreamObserver<Req>) serviceImpl.fullDuplexCall(
(io.grpc.stub.StreamObserver<io.grpc.testing.integration.Messages.StreamingOutputCallResponse>) responseObserver);
case METHODID_HALF_DUPLEX_CALL:
return (io.grpc.stub.StreamObserver<Req>) serviceImpl.halfDuplexCall(
(io.grpc.stub.StreamObserver<io.grpc.testing.integration.Messages.StreamingOutputCallResponse>) responseObserver);
default:
throw new AssertionError();
}
}
}
  /**
   * Wires a TestService implementation into a ServerServiceDefinition that can
   * be registered with a gRPC server. Each method descriptor is bound to a
   * MethodHandlers instance carrying the matching METHODID_* constant, wrapped
   * in the call-type adapter (unary / server-streaming / client-streaming /
   * bidi) declared for it in the .proto file. Generated code — do not
   * hand-edit; regenerate from the .proto instead.
   */
  public static io.grpc.ServerServiceDefinition bindService(
      final TestService serviceImpl) {
    return io.grpc.ServerServiceDefinition.builder(SERVICE_NAME)
      .addMethod(
        METHOD_EMPTY_CALL,
        asyncUnaryCall(
          new MethodHandlers<
            com.google.protobuf.EmptyProtos.Empty,
            com.google.protobuf.EmptyProtos.Empty>(
              serviceImpl, METHODID_EMPTY_CALL)))
      .addMethod(
        METHOD_UNARY_CALL,
        asyncUnaryCall(
          new MethodHandlers<
            io.grpc.testing.integration.Messages.SimpleRequest,
            io.grpc.testing.integration.Messages.SimpleResponse>(
              serviceImpl, METHODID_UNARY_CALL)))
      .addMethod(
        METHOD_STREAMING_OUTPUT_CALL,
        asyncServerStreamingCall(
          new MethodHandlers<
            io.grpc.testing.integration.Messages.StreamingOutputCallRequest,
            io.grpc.testing.integration.Messages.StreamingOutputCallResponse>(
              serviceImpl, METHODID_STREAMING_OUTPUT_CALL)))
      .addMethod(
        METHOD_STREAMING_INPUT_CALL,
        asyncClientStreamingCall(
          new MethodHandlers<
            io.grpc.testing.integration.Messages.StreamingInputCallRequest,
            io.grpc.testing.integration.Messages.StreamingInputCallResponse>(
              serviceImpl, METHODID_STREAMING_INPUT_CALL)))
      .addMethod(
        METHOD_FULL_DUPLEX_CALL,
        asyncBidiStreamingCall(
          new MethodHandlers<
            io.grpc.testing.integration.Messages.StreamingOutputCallRequest,
            io.grpc.testing.integration.Messages.StreamingOutputCallResponse>(
              serviceImpl, METHODID_FULL_DUPLEX_CALL)))
      .addMethod(
        METHOD_HALF_DUPLEX_CALL,
        asyncBidiStreamingCall(
          new MethodHandlers<
            io.grpc.testing.integration.Messages.StreamingOutputCallRequest,
            io.grpc.testing.integration.Messages.StreamingOutputCallResponse>(
              serviceImpl, METHODID_HALF_DUPLEX_CALL)))
      .build();
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.accumulo.test.functional;
import static org.junit.Assert.assertEquals;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.SortedMap;
import java.util.TreeMap;
import org.apache.accumulo.core.Constants;
import org.apache.accumulo.core.client.Scanner;
import org.apache.accumulo.core.client.impl.ScannerImpl;
import org.apache.accumulo.core.client.impl.Writer;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.KeyExtent;
import org.apache.accumulo.core.data.Mutation;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.file.rfile.RFile;
import org.apache.accumulo.core.metadata.MetadataTable;
import org.apache.accumulo.core.metadata.schema.DataFileValue;
import org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection;
import org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection.DataFileColumnFamily;
import org.apache.accumulo.core.security.Authorizations;
import org.apache.accumulo.core.util.ColumnFQ;
import org.apache.accumulo.core.zookeeper.ZooUtil;
import org.apache.accumulo.fate.zookeeper.IZooReaderWriter;
import org.apache.accumulo.fate.zookeeper.ZooLock.LockLossReason;
import org.apache.accumulo.fate.zookeeper.ZooLock.LockWatcher;
import org.apache.accumulo.fate.zookeeper.ZooUtil.NodeExistsPolicy;
import org.apache.accumulo.server.ServerConstants;
import org.apache.accumulo.server.client.HdfsZooInstance;
import org.apache.accumulo.server.fs.FileRef;
import org.apache.accumulo.server.master.state.Assignment;
import org.apache.accumulo.server.master.state.TServerInstance;
import org.apache.accumulo.server.security.SystemCredentials;
import org.apache.accumulo.server.tablets.TabletTime;
import org.apache.accumulo.server.util.FileUtil;
import org.apache.accumulo.server.util.MasterMetadataUtil;
import org.apache.accumulo.server.util.MetadataTableUtil;
import org.apache.accumulo.server.zookeeper.TransactionWatcher;
import org.apache.accumulo.server.zookeeper.ZooLock;
import org.apache.accumulo.server.zookeeper.ZooReaderWriter;
import org.apache.accumulo.tserver.TabletServer;
import org.apache.hadoop.io.Text;
import org.junit.Test;
/**
 * Functional test for tablet split recovery: performs only the first N steps
 * of a METADATA-table split (simulating a crash mid-split), then invokes the
 * tablet server's recovery path and verifies the surviving metadata entries
 * and data-file sizes are exactly what a completed split would produce.
 */
public class SplitRecoveryIT extends ConfigurableMacIT {

  @Override
  protected int defaultTimeoutSeconds() {
    return 30;
  }

  /** Builds a KeyExtent for {@code table}; a null row means "unbounded". */
  private KeyExtent nke(String table, String endRow, String prevEndRow) {
    return new KeyExtent(new Text(table), endRow == null ? null : new Text(endRow), prevEndRow == null ? null : new Text(prevEndRow));
  }

  /**
   * Acquires a ZooKeeper lock (required by the metadata update utilities),
   * then runs the recovery scenario for tables with one, two and three
   * tablets, splitting the first/middle/last tablet at both fail points.
   */
  private void run() throws Exception {
    String zPath = ZooUtil.getRoot(HdfsZooInstance.getInstance()) + "/testLock";
    IZooReaderWriter zoo = ZooReaderWriter.getInstance();
    zoo.putPersistentData(zPath, new byte[0], NodeExistsPolicy.OVERWRITE);
    ZooLock zl = new ZooLock(zPath);
    boolean gotLock = zl.tryLock(new LockWatcher() {
      @Override
      public void lostLock(LockLossReason reason) {
        // Losing the lock invalidates the whole scenario; bail out hard.
        System.exit(-1);
      }

      @Override
      public void unableToMonitorLockNode(Throwable e) {
        System.exit(-1);
      }
    }, "foo".getBytes(StandardCharsets.UTF_8));
    if (!gotLock) {
      System.err.println("Failed to get lock " + zPath);
    }
    // run test for a table with one tablet
    runSplitRecoveryTest(0, "sp", 0, zl, nke("foo0", null, null));
    runSplitRecoveryTest(1, "sp", 0, zl, nke("foo1", null, null));
    // run test for tables with two tablets, run test on first and last tablet
    runSplitRecoveryTest(0, "k", 0, zl, nke("foo2", "m", null), nke("foo2", null, "m"));
    runSplitRecoveryTest(1, "k", 0, zl, nke("foo3", "m", null), nke("foo3", null, "m"));
    runSplitRecoveryTest(0, "o", 1, zl, nke("foo4", "m", null), nke("foo4", null, "m"));
    runSplitRecoveryTest(1, "o", 1, zl, nke("foo5", "m", null), nke("foo5", null, "m"));
    // run test for table w/ three tablets, run test on middle tablet
    runSplitRecoveryTest(0, "o", 1, zl, nke("foo6", "m", null), nke("foo6", "r", "m"), nke("foo6", null, "r"));
    runSplitRecoveryTest(1, "o", 1, zl, nke("foo7", "m", null), nke("foo7", "r", "m"), nke("foo7", null, "r"));
    // run test for table w/ three tablets, run test on first
    runSplitRecoveryTest(0, "g", 0, zl, nke("foo8", "m", null), nke("foo8", "r", "m"), nke("foo8", null, "r"));
    runSplitRecoveryTest(1, "g", 0, zl, nke("foo9", "m", null), nke("foo9", "r", "m"), nke("foo9", null, "r"));
    // run test for table w/ three tablets, run test on last tablet
    runSplitRecoveryTest(0, "w", 2, zl, nke("fooa", "m", null), nke("fooa", "r", "m"), nke("fooa", null, "r"));
    runSplitRecoveryTest(1, "w", 2, zl, nke("foob", "m", null), nke("foob", "r", "m"), nke("foob", null, "r"));
  }

  /**
   * Registers the given tablets in the metadata table with one bulk-loaded
   * data file each, then partially splits {@code extents[extentToSplit]} at
   * row {@code mr} and runs recovery.
   *
   * @param failPoint number of split steps to complete before "crashing" (0 or 1)
   * @param mr split-point row
   * @param extentToSplit index into {@code extents} of the tablet to split
   */
  private void runSplitRecoveryTest(int failPoint, String mr, int extentToSplit, ZooLock zl, KeyExtent... extents) throws Exception {
    Text midRow = new Text(mr);
    SortedMap<FileRef,DataFileValue> splitMapFiles = null;
    for (int i = 0; i < extents.length; i++) {
      KeyExtent extent = extents[i];
      String tdir = ServerConstants.getTablesDirs()[0] + "/" + extent.getTableId().toString() + "/dir_" + i;
      MetadataTableUtil.addTablet(extent, tdir, SystemCredentials.get(), TabletTime.LOGICAL_TIME_ID, zl);
      SortedMap<FileRef,DataFileValue> mapFiles = new TreeMap<>();
      mapFiles.put(new FileRef(tdir + "/" + RFile.EXTENSION + "_000_000"), new DataFileValue(1000017 + i, 10000 + i));
      if (i == extentToSplit) {
        splitMapFiles = mapFiles;
      }
      int tid = 0;
      // Record the file as a bulk import so getBulkFilesLoaded() can see it.
      TransactionWatcher.ZooArbitrator.start(Constants.BULK_ARBITRATOR_TYPE, tid);
      MetadataTableUtil.updateTabletDataFile(tid, extent, mapFiles, "L0", SystemCredentials.get(), zl);
    }
    KeyExtent extent = extents[extentToSplit];
    KeyExtent high = new KeyExtent(extent.getTableId(), extent.getEndRow(), midRow);
    KeyExtent low = new KeyExtent(extent.getTableId(), midRow, extent.getPrevEndRow());
    splitPartiallyAndRecover(extent, high, low, .4, splitMapFiles, midRow, "localhost:1234", failPoint, zl);
  }

  /**
   * Performs the first {@code steps} stages of splitting {@code extent} into
   * {@code low}/{@code high}, then invokes the tablet server's recovery logic
   * and checks the resulting metadata.
   */
  private void splitPartiallyAndRecover(KeyExtent extent, KeyExtent high, KeyExtent low, double splitRatio, SortedMap<FileRef,DataFileValue> mapFiles,
      Text midRow, String location, int steps, ZooLock zl) throws Exception {
    SortedMap<FileRef,DataFileValue> lowDatafileSizes = new TreeMap<>();
    SortedMap<FileRef,DataFileValue> highDatafileSizes = new TreeMap<>();
    List<FileRef> highDatafilesToRemove = new ArrayList<>();
    // Step 0: partition the data files, mark the tablet as splitting and
    // record a future location for the high tablet.
    MetadataTableUtil.splitDatafiles(extent.getTableId(), midRow, splitRatio, new HashMap<FileRef,FileUtil.FileInfo>(), mapFiles, lowDatafileSizes,
        highDatafileSizes, highDatafilesToRemove);
    MetadataTableUtil.splitTablet(high, extent.getPrevEndRow(), splitRatio, SystemCredentials.get(), zl);
    TServerInstance instance = new TServerInstance(location, zl.getSessionId());
    Writer writer = new Writer(HdfsZooInstance.getInstance(), SystemCredentials.get(), MetadataTable.ID);
    Assignment assignment = new Assignment(high, instance);
    Mutation m = new Mutation(assignment.tablet.getMetadataEntry());
    m.put(TabletsSection.FutureLocationColumnFamily.NAME, assignment.server.asColumnQualifier(), assignment.server.asMutationValue());
    writer.update(m);
    if (steps >= 1) {
      // Step 1: create the low tablet's metadata entry.
      Map<FileRef,Long> bulkFiles = MetadataTableUtil.getBulkFilesLoaded(SystemCredentials.get(), extent);
      MasterMetadataUtil.addNewTablet(low, "/lowDir", instance, lowDatafileSizes, bulkFiles, SystemCredentials.get(), TabletTime.LOGICAL_TIME_ID + "0", -1L,
          -1L, zl);
    }
    if (steps >= 2) {
      // Step 2: complete the split of the high tablet.
      MetadataTableUtil.finishSplit(high, highDatafileSizes, highDatafilesToRemove, SystemCredentials.get(), zl);
    }
    // Recovery entry point under test: must finish whatever split work remains.
    TabletServer.verifyTabletInformation(high, instance, null, "127.0.0.1:0", zl);
    if (steps >= 1) {
      ensureTabletHasNoUnexpectedMetadataEntries(low, lowDatafileSizes);
      ensureTabletHasNoUnexpectedMetadataEntries(high, highDatafileSizes);
      Map<FileRef,Long> lowBulkFiles = MetadataTableUtil.getBulkFilesLoaded(SystemCredentials.get(), low);
      Map<FileRef,Long> highBulkFiles = MetadataTableUtil.getBulkFilesLoaded(SystemCredentials.get(), high);
      // Both children inherited the same bulk files from the parent tablet.
      if (!lowBulkFiles.equals(highBulkFiles)) {
        throw new Exception(" " + lowBulkFiles + " != " + highBulkFiles + " " + low + " " + high);
      }
      if (lowBulkFiles.isEmpty()) {
        throw new Exception(" no bulk files " + low);
      }
    } else {
      // Split never really started from metadata's point of view; the
      // original tablet must be intact.
      ensureTabletHasNoUnexpectedMetadataEntries(extent, mapFiles);
    }
  }

  /**
   * Scans the metadata rows for {@code extent} and fails if any entry is not
   * one of the expected columns/column families, if any expected column is
   * missing, or if the recorded data-file sizes differ from
   * {@code expectedMapFiles}.
   */
  private void ensureTabletHasNoUnexpectedMetadataEntries(KeyExtent extent, SortedMap<FileRef,DataFileValue> expectedMapFiles) throws Exception {
    Scanner scanner = new ScannerImpl(HdfsZooInstance.getInstance(), SystemCredentials.get(), MetadataTable.ID, Authorizations.EMPTY);
    scanner.setRange(extent.toMetadataRange());
    HashSet<ColumnFQ> expectedColumns = new HashSet<>();
    expectedColumns.add(TabletsSection.ServerColumnFamily.DIRECTORY_COLUMN);
    expectedColumns.add(TabletsSection.TabletColumnFamily.PREV_ROW_COLUMN);
    expectedColumns.add(TabletsSection.ServerColumnFamily.TIME_COLUMN);
    expectedColumns.add(TabletsSection.ServerColumnFamily.LOCK_COLUMN);
    HashSet<Text> expectedColumnFamilies = new HashSet<>();
    expectedColumnFamilies.add(DataFileColumnFamily.NAME);
    expectedColumnFamilies.add(TabletsSection.FutureLocationColumnFamily.NAME);
    expectedColumnFamilies.add(TabletsSection.CurrentLocationColumnFamily.NAME);
    expectedColumnFamilies.add(TabletsSection.LastLocationColumnFamily.NAME);
    expectedColumnFamilies.add(TabletsSection.BulkFileColumnFamily.NAME);
    Iterator<Entry<Key,Value>> iter = scanner.iterator();
    while (iter.hasNext()) {
      Key key = iter.next().getKey();
      if (!key.getRow().equals(extent.getMetadataEntry())) {
        throw new Exception("Tablet " + extent + " contained unexpected " + MetadataTable.NAME + " entry " + key);
      }
      if (expectedColumnFamilies.contains(key.getColumnFamily())) {
        continue;
      }
      if (expectedColumns.remove(new ColumnFQ(key))) {
        continue;
      }
      throw new Exception("Tablet " + extent + " contained unexpected " + MetadataTable.NAME + " entry " + key);
    }
    System.out.println("expectedColumns " + expectedColumns);
    // Simplified from "size() > 1 || (size() == 1)", which is just "not
    // empty": every expected column must have been seen and removed above.
    if (!expectedColumns.isEmpty()) {
      throw new Exception("Not all expected columns seen " + extent + " " + expectedColumns);
    }
    SortedMap<FileRef,DataFileValue> fixedMapFiles = MetadataTableUtil.getDataFileSizes(extent, SystemCredentials.get());
    verifySame(expectedMapFiles, fixedMapFiles);
  }

  /** Fails unless both maps contain identical keys mapped to equal values. */
  private void verifySame(SortedMap<FileRef,DataFileValue> datafileSizes, SortedMap<FileRef,DataFileValue> fixedDatafileSizes) throws Exception {
    if (!datafileSizes.keySet().containsAll(fixedDatafileSizes.keySet()) || !fixedDatafileSizes.keySet().containsAll(datafileSizes.keySet())) {
      throw new Exception("Key sets not the same " + datafileSizes.keySet() + " != " + fixedDatafileSizes.keySet());
    }
    for (Entry<FileRef,DataFileValue> entry : datafileSizes.entrySet()) {
      DataFileValue dfv = entry.getValue();
      DataFileValue otherDfv = fixedDatafileSizes.get(entry.getKey());
      if (!dfv.equals(otherDfv)) {
        throw new Exception(entry.getKey() + " dfv not equal " + dfv + " " + otherDfv);
      }
    }
  }

  public static void main(String[] args) throws Exception {
    new SplitRecoveryIT().run();
  }

  @Test
  public void test() throws Exception {
    // Runs the scenario in a separate JVM inside the mini cluster.
    assertEquals(0, exec(SplitRecoveryIT.class).waitFor());
  }
}
| |
package com.example.appactionvisualizer.ui.activity;
import android.app.Activity;
import android.content.DialogInterface;
import android.content.Intent;
import android.os.Bundle;
import android.text.SpannableString;
import android.text.Spanned;
import android.text.TextUtils;
import android.text.method.LinkMovementMethod;
import android.text.style.ClickableSpan;
import android.view.View;
import android.widget.TextView;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import com.example.appactionvisualizer.R;
import com.example.appactionvisualizer.constants.Constant;
import com.example.appactionvisualizer.databean.ActionType;
import com.example.appactionvisualizer.databean.AppActionProtos.Action;
import com.example.appactionvisualizer.databean.AppActionProtos.AppAction;
import com.example.appactionvisualizer.databean.AppActionProtos.EntitySet;
import com.example.appactionvisualizer.databean.AppActionProtos.FulfillmentOption;
import com.example.appactionvisualizer.ui.activity.parameter.InputParameterActivity;
import com.example.appactionvisualizer.ui.activity.parameter.LocationActivity;
import com.example.appactionvisualizer.utils.AppUtils;
import com.example.appactionvisualizer.utils.StringUtils;
import com.google.protobuf.ListValue;
import com.google.protobuf.Struct;
import com.google.protobuf.Value;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import static com.example.appactionvisualizer.constants.Constant.URL_PARAMETER_INDICATOR;
import static com.example.appactionvisualizer.databean.AppActionProtos.FulfillmentOption.FulfillmentMode.DEEPLINK;
/**
* Activity for users to select parameters. Use recyclerview since the typical number of parameters
* is unknown yet.
*/
public class ParameterActivity extends CustomActivity {
  private static final String TAG = "Parameter";
  // Selected fulfillment option, its parent action and the owning app action,
  // all delivered via the launching Intent (see initData()).
  private FulfillmentOption fulfillmentOption;
  private Action action;
  private AppAction appAction;
  // Deep link template of the fulfillment option, e.g.
  // "https://example.com/test{?foo,bar}".
  private String urlTemplate;
  private TextView tvUrlTemplate, entityItemView, link;

  /** Reads the fulfillment option, action and app action out of the Intent extras. */
  @Override
  protected void initData() {
    Intent intent = getIntent();
    fulfillmentOption =
        (FulfillmentOption) intent.getSerializableExtra(Constant.FULFILLMENT_OPTION);
    action = (Action) intent.getSerializableExtra(Constant.ACTION);
    appAction = (AppAction) intent.getSerializableExtra(Constant.APP_NAME);
    if (fulfillmentOption != null) urlTemplate = fulfillmentOption.getUrlTemplate().getTemplate();
  }

  /** Binds views and, when a fulfillment option is present, wires up the template text. */
  @Override
  protected void initView() {
    super.initView();
    tvUrlTemplate = findViewById(R.id.url_template);
    entityItemView = findViewById(R.id.url);
    link = findViewById(R.id.link);
    if (fulfillmentOption == null) {
      AppUtils.showMsg(getString(R.string.error_unknown), ParameterActivity.this);
      return;
    }
    setReferenceLink();
    initClickableText();
  }

  @Override
  protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_parameter);
    initData();
    initView();
  }

  /**
   * Receives the parameter values the user chose in InputParameterActivity or
   * LocationActivity and substitutes them into the URL template.
   */
  @Override
  protected void onActivityResult(int requestCode, int resultCode, @Nullable Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (resultCode == Activity.RESULT_CANCELED) {
      return;
    }
    switch (requestCode) {
      case Constant.INPUT_PARAMETER:
        replaceParameter(data);
        break;
      case Constant.SELECT_ADDRESS:
        replaceAddressParameter(data);
        break;
    }
  }

  // Set a reference to corresponding official page
  private void setReferenceLink() {
    String intentName = action.getIntentName();
    // Show just the final segment of the intent name in the action bar.
    String title = intentName.substring(intentName.lastIndexOf('.') + 1);
    Objects.requireNonNull(getSupportActionBar()).setTitle(title);
    // Documentation URLs use lower-kebab-case intent names.
    String intentUrl = title.toLowerCase().replaceAll("_", "-");
    String linkString =
        getString(
            R.string.url_action_prefix,
            ActionType.getActionTypeByName(intentName).getUrl(),
            intentUrl);
    setClickableTextToWeb(link, linkString);
  }

  /**
   * set the ways of selecting parameter currently 2 ways: 1. user inputs arbitrary text/select from
   * list 2. select two addresses(for taxi reservation action intent)
   */
  private void initClickableText() {
    if (urlTemplate.isEmpty()) return;
    if (fulfillmentOption.getFulfillmentMode() != DEEPLINK) {
      // Non-deeplink fulfillments cannot be parameterized here; show as-is.
      AppUtils.showMsg(getString(R.string.error_not_deeplink), this);
      tvUrlTemplate.setText(urlTemplate);
      return;
    }
    SpannableString ss = new SpannableString(urlTemplate);
    if (action.getIntentName().equals(getString(R.string.create_taxi))) {
      setLocationParameter(ss);
    } else if (urlTemplate.contains(Constant.URL_NO_LINK)) {
      setUrlParameter(ss);
    } else {
      setMappingParameter(ss);
    }
    tvUrlTemplate.setText(ss);
    tvUrlTemplate.setMovementMethod(LinkMovementMethod.getInstance());
  }

  // For the @url case, just pop up a window for user to choose. No need to jump to next page
  private void setUrlParameter(SpannableString ss) {
    if (fulfillmentOption.getUrlTemplate().getParameterMapCount() > 0
        || !action.getParameters(0).getEntitySetReference(0).getUrlFilter().isEmpty()) {
      AppUtils.showMsg(getString(R.string.error_filter), this);
      return;
    }
    final EntitySet entitySet = AppUtils.checkUrlEntitySet(appAction, action);
    if (entitySet == null) {
      AppUtils.showMsg(getString(R.string.error_parsing), this);
      return;
    }
    ClickableSpan clickable =
        new ClickableSpan() {
          @Override
          public void onClick(@NonNull View view) {
            final ListValue entityItemValue =
                entitySet.getItemList().getFieldsOrThrow(Constant.ENTITY_ITEM_LIST).getListValue();
            // Selecting a list entry displays that entity's url.
            DialogInterface.OnClickListener listener =
                new DialogInterface.OnClickListener() {
                  @Override
                  public void onClick(DialogInterface dialogInterface, int i) {
                    Struct item = entityItemValue.getValues(i).getStructValue();
                    String entityUrl = item.getFieldsOrThrow(Constant.ENTITY_URL).getStringValue();
                    setClickableText(entityItemView, entityUrl);
                  }
                };
            List<CharSequence> names = new ArrayList<>();
            // Set the list contents from listvalue's names
            for (Value entity : entityItemValue.getValuesList()) {
              names.add(
                  entity
                      .getStructValue()
                      .getFieldsOrThrow(Constant.ENTITY_FIELD_NAME)
                      .getStringValue());
            }
            String title =
                entitySet
                    .getItemList()
                    .getFieldsOrThrow(Constant.ENTITY_FIELD_IDENTIFIER)
                    .getStringValue();
            AppUtils.popUpDialog(ParameterActivity.this, title, names, listener);
          }
        };
    ss.setSpan(clickable, 0, urlTemplate.length(), Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
  }

  // The create_taxi intent needs latitude and longitude values for parameters
  private void setLocationParameter(SpannableString ss) {
    ClickableSpan clickable =
        new ClickableSpan() {
          @Override
          public void onClick(@NonNull View view) {
            Intent intent = new Intent(ParameterActivity.this, LocationActivity.class);
            startActivityForResult(intent, Constant.SELECT_ADDRESS);
          }
        };
    int start = urlTemplate.indexOf(URL_PARAMETER_INDICATOR);
    int end = urlTemplate.length();
    // No parameter section in the template: nothing to make clickable.
    if (start == -1) return;
    ss.setSpan(clickable, start, end, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
  }

  // Set parameters for the fulfillment option
  private void setMappingParameter(SpannableString ss) {
    final Map<String, String> parameterMapMap =
        fulfillmentOption.getUrlTemplate().getParameterMapMap();
    for (final Map.Entry<String, String> entry : parameterMapMap.entrySet()) {
      final String key = entry.getKey();
      ClickableSpan clickable =
          new ClickableSpan() {
            @Override
            public void onClick(@NonNull View view) {
              Intent intent = new Intent(ParameterActivity.this, InputParameterActivity.class);
              intent.putExtra(Constant.FULFILLMENT_OPTION, fulfillmentOption);
              intent.putExtra(Constant.ACTION, action);
              intent.putExtra(Constant.APP_NAME, appAction);
              startActivityForResult(intent, Constant.INPUT_PARAMETER);
            }
          };
      int start = urlTemplate.indexOf(key, urlTemplate.indexOf(URL_PARAMETER_INDICATOR));
      // Fix: skip keys missing from the template — setSpan with start == -1
      // would throw an IndexOutOfBoundsException at runtime.
      if (start == -1) {
        continue;
      }
      int end = start + key.length();
      ss.setSpan(clickable, start, end, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
    }
  }

  /**
   * set the constructed deeplink to a specific text view and set on-click jump logic
   *
   * @param display the text view to display the url
   * @param curUrl the constructed url
   */
  private void setClickableText(final TextView display, final String curUrl) {
    display.setText(curUrl);
    display.setTextColor(getResources().getColor(R.color.colorAccent));
    display.setCompoundDrawablesWithIntrinsicBounds(android.R.drawable.ic_menu_set_as, 0, 0, 0);
    display.setOnClickListener(
        new View.OnClickListener() {
          @Override
          public void onClick(View view) {
            AppUtils.jumpToApp(ParameterActivity.this, curUrl, appAction.getPackageName());
          }
        });
  }

  /**
   * set the web page url to a specific text view and set jump logic
   *
   * @param display the text view to display the url
   * @param curUrl the web page url
   */
  private void setClickableTextToWeb(final TextView display, final String curUrl) {
    display.setText(curUrl);
    display.setTextColor(getResources().getColor(R.color.colorAccent));
    display.setOnClickListener(
        new View.OnClickListener() {
          @Override
          public void onClick(View view) {
            AppUtils.jumpToWebPage(ParameterActivity.this, curUrl);
          }
        });
  }

  /** @param data intent data received from LocationActivity construct the url */
  void replaceAddressParameter(Intent data) {
    int idx = urlTemplate.indexOf(URL_PARAMETER_INDICATOR);
    // Only reachable when setLocationParameter() found the indicator, so idx
    // is >= 0 here; keep the base url plus the separator char ('?' or '&').
    String curUrl = urlTemplate.substring(0, idx) + urlTemplate.charAt(idx + 1);
    List<String> parameters = new ArrayList<>();
    for (Map.Entry<String, String> entry :
        fulfillmentOption.getUrlTemplate().getParameterMapMap().entrySet()) {
      AppUtils.addLocationParameters(
          this,
          entry,
          parameters,
          data.getStringExtra(Constant.PICK_UP_LATITUDE),
          data.getStringExtra(Constant.PICK_UP_LONGITUDE),
          data.getStringExtra(Constant.DROP_OFF_LATITUDE),
          data.getStringExtra(Constant.DROP_OFF_LONGITUDE));
    }
    curUrl += TextUtils.join("&", parameters);
    setClickableText(entityItemView, curUrl);
  }

  /**
   * Replace each parameter with input from user to construct the url e.g.:
   * https://example.com/test{?foo,bar} ==> https://example.com/test?foo=123&bar=456 *
   * https://example.com/test?utm_campaign=appactions{&foo,bar} ==> *
   * https://example.com/test?utm_campaign=appactions&foo=123&bar=456
   *
   * @param data intent data received from selectActivity
   */
  private void replaceParameter(Intent data) {
    if (fulfillmentOption.getUrlTemplate().getParameterMapCount() == 1) {
      String key =
          fulfillmentOption.getUrlTemplate().getParameterMapMap().keySet().iterator().next();
      String curUrl =
          StringUtils.replaceSingleParameter(this, urlTemplate, key, data.getStringExtra(key));
      setClickableText(entityItemView, curUrl);
      return;
    }
    int firstPartIdx = urlTemplate.indexOf(URL_PARAMETER_INDICATOR);
    int secondPartIdx = urlTemplate.indexOf("}");
    // Fix: a template missing the "{...}" parameter section would make the
    // substring calls below throw (indexOf returns -1); report and bail out.
    if (firstPartIdx == -1 || secondPartIdx == -1) {
      AppUtils.showMsg(getString(R.string.error_parsing), this);
      return;
    }
    String curUrl = urlTemplate.substring(0, firstPartIdx) + urlTemplate.charAt(firstPartIdx + 1);
    List<String> parameters = new ArrayList<>();
    for (final String key : fulfillmentOption.getUrlTemplate().getParameterMapMap().keySet()) {
      String value = data.getStringExtra(key);
      parameters.add(getString(R.string.url_parameter, key, value));
    }
    curUrl += TextUtils.join("&", parameters);
    curUrl += urlTemplate.substring(secondPartIdx + 1);
    setClickableText(entityItemView, curUrl);
  }
}
| |
/* Copyright (c) 2015 Craig MacFarlane
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted (subject to the limitations in the disclaimer below) provided that
the following conditions are met:
Redistributions of source code must retain the above copyright notice, this list
of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.
Neither the name of Craig MacFarlane nor the names of its contributors
may be used to endorse or promote products derived from this software without
specific prior written permission.
NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */
package com.qualcomm.ftcrobotcontroller.opmodes;
import com.qualcomm.hardware.MatrixDcMotorController;
import com.qualcomm.robotcore.eventloop.opmode.OpMode;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.hardware.DcMotorController;
import com.qualcomm.robotcore.hardware.Servo;
import com.qualcomm.robotcore.hardware.ServoController;
import com.qualcomm.robotcore.util.ElapsedTime;
import java.util.HashSet;
import java.util.Set;
/**
* A simple example of all motors and servos oscillating
*/
public class MatrixControllerDemo extends OpMode {
private ElapsedTime motorOscTimer = new ElapsedTime(0);
private ElapsedTime servoOscTimer = new ElapsedTime(0);
private ElapsedTime spamPrevention = new ElapsedTime(0);
private DcMotor motor1;
private DcMotor motor2;
private DcMotor motor3;
private DcMotor motor4;
private Set<DcMotor> motorSet = new HashSet<DcMotor>();
private Servo servo1;
private Servo servo2;
private Servo servo3;
private Servo servo4;
private MatrixDcMotorController mc;
private ServoController sc;
private boolean loopOnce = false;
private boolean firstMotors = true;
private boolean firstServos = true;
private boolean firstBattery = true;
private int battery;
private final static double MOTOR_OSC_FREQ = 2.0;
private final static double SERVO_OSC_FREQ = 1.0;
private final static double SPAM_PREVENTION_FREQ = 1.0;
private double motorPower = 1.0;
private double servoPosition = 0.0;
@Override
public void init()
{
motor1 = hardwareMap.dcMotor.get("motor_1");
motor2 = hardwareMap.dcMotor.get("motor_2");
motor3 = hardwareMap.dcMotor.get("motor_3");
motor4 = hardwareMap.dcMotor.get("motor_4");
/*
* A set of motors to use with the Matrix motor controller's
* pending feature. See example below. Note that this is
* completely optional.
*/
motorSet.add(motor1);
motorSet.add(motor2);
motorSet.add(motor3);
motorSet.add(motor4);
servo1 = hardwareMap.servo.get("servo_1");
servo2 = hardwareMap.servo.get("servo_2");
servo3 = hardwareMap.servo.get("servo_3");
servo4 = hardwareMap.servo.get("servo_4");
/*
* Matrix controllers are special.
*
* A Matrix controller is one controller with both motors and servos
* but software wants to treat it as two distinct controllers, one
* DcMotorController, and one ServoController.
*
* We accomplish this by appending Motor and Servo to the name
* given in the configuration. In the example below the name
* of the controller is "MatrixController" so the motor controller
* instance is "MatrixControllerMotor" and the servo controller
* instance is "MatrixControllerServo".
*/
mc = (MatrixDcMotorController)hardwareMap.dcMotorController.get("MatrixController");
motor1.setMode(DcMotorController.RunMode.RUN_USING_ENCODERS);
motor2.setMode(DcMotorController.RunMode.RUN_USING_ENCODERS);
motor3.setMode(DcMotorController.RunMode.RUN_USING_ENCODERS);
motor4.setMode(DcMotorController.RunMode.RUN_USING_ENCODERS);
/*
* Servos are not enabled by default. Software must call pwmEnable()
* for servos to function.
*/
sc = hardwareMap.servoController.get("MatrixController");
sc.pwmEnable();
}
@Override
public void start()
{
motorOscTimer.reset();
servoOscTimer.reset();
spamPrevention.reset();
}
@Override
public void stop()
{
/*
* An example of setting power for individual motors as normal.
*
* For the Matrix controller, the methods take effect immediately
* as each call to setPower(), or any other method that interacts
* with the controller, is transformed into an i2c transaction and
* queued. A separate thread is processing the queue.
*
* In practice this means that the first call to setPower will
* be applied 20 to 40 milliseconds before the last call as the
* processing thread works through the queue. Testing
* has shown that this latency is not large enough to have any
* real world negative impacts, however teams may choose to use
* the controller's setMotorPower() method if they desire precise
* simultaneous motor operations. See example in handleMotors().
*/
motor1.setPower(0.0);
motor2.setPower(0.0);
motor3.setPower(0.0);
motor4.setPower(0.0);
sc.pwmDisable();
}
/*
 * handleMotors
 *
 * Flip the direction of the motor set once per oscillation period.
 */
protected void handleMotors()
{
    // Nothing to do until the first pass, or until the period elapses.
    if (!firstMotors && motorOscTimer.time() <= MOTOR_OSC_FREQ) {
        return;
    }
    motorPower = -motorPower;
    /*
     * The MatrixDcMotorController's setMotorPower() accepts a collection
     * of motors. When given one, the controller sets a pending bit that
     * defers applying any new set point until the bit is cleared, at
     * which point every motor's power takes effect simultaneously.
     * setMotorPower() manages the pending bit itself.
     */
    mc.setMotorPower(motorSet, motorPower);
    motorOscTimer.reset();
    firstMotors = false;
}
/*
 * handleServos
 *
 * Toggle all four servos between their two end positions once per period.
 */
protected void handleServos()
{
    if (firstServos || (servoOscTimer.time() > SERVO_OSC_FREQ)) {
        // Flip between the endpoints 0.0 and 1.0.
        servoPosition = (servoPosition == 0.0) ? 1.0 : 0.0;
        servo1.setPosition(servoPosition);
        servo2.setPosition(servoPosition);
        servo3.setPosition(servoPosition);
        servo4.setPosition(servoPosition);
        servoOscTimer.reset();
        firstServos = false;
    }
}
/*
 * handleBattery
 *
 * The Matrix controller has its own battery; sample its voltage at a
 * throttled rate and report the latest reading via telemetry every loop.
 */
protected void handleBattery()
{
    final boolean sampleDue =
            firstBattery || (spamPrevention.time() > SPAM_PREVENTION_FREQ);
    if (sampleDue) {
        battery = mc.getBattery();
        spamPrevention.reset();
        firstBattery = false;
    }
    // Telemetry is refreshed each cycle with the most recent sample,
    // scaled by 1/1000 for display.
    telemetry.addData("Battery: ", ((float)battery/1000));
}
@Override
public void loop()
{
    // Run each independent subsystem handler once per scheduler cycle;
    // each keeps its own timer and first-pass flag.
    handleServos();
    handleMotors();
    handleBattery();
}
}
| |
/*
* Copyright 2009 Martin Grotzke
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package de.javakaffee.web.msm;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNull;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.testng.Assert;
import org.testng.annotations.Test;
import de.javakaffee.web.msm.NodeAvailabilityCache.CacheLoader;
/**
* Tests the {@link NodeIdService}.
*
* @author <a href="mailto:martin.grotzke@javakaffee.de">Martin Grotzke</a>
*/
public class NodeIdServiceTest {

    /**
     * Availability overrides set via {@link NodeIdService#setNodeAvailable}
     * must take precedence over what the underlying cache loader reports.
     */
    @Test
    public void testSetNodeAvailability() {
        final String nodeId1 = "n1";
        // Loader that reports every node as reachable; explicit overrides
        // must still win.
        final CacheLoader<String> cacheLoader = new CacheLoader<String>() {
            @Override
            public boolean isNodeAvailable( final String key ) {
                return true;
            }
        };
        final NodeIdService cut = new NodeIdService( new NodeAvailabilityCache<String>( 10, 100, cacheLoader ),
                NodeIdList.create( nodeId1 ), Collections.<String> emptyList() );
        Assert.assertTrue( cut.isNodeAvailable( nodeId1 ) );
        cut.setNodeAvailable( nodeId1, false );
        Assert.assertFalse( cut.isNodeAvailable( nodeId1 ) );
        cut.setNodeAvailable( nodeId1, true );
        Assert.assertTrue( cut.isNodeAvailable( nodeId1 ) );
    }

    /**
     * With a single configured node there is no alternative node to fail
     * over to, so the result must be null.
     */
    @Test
    public final void testGetNextNodeId_SingleNode() {
        final CacheLoader<String> cacheLoader = new DummyCacheLoader( null );
        final NodeIdService cut = new NodeIdService( new NodeAvailabilityCache<String>( 10, 100, cacheLoader ),
                NodeIdList.create( "n1" ), null );
        final String actual = cut.getAvailableNodeId( "n1" );
        assertNull( actual, "For a sole existing node we cannot get a next node" );
    }

    /**
     * Test two memcached nodes:
     * - node n1 is the currently used node, which failed
     * - node n2 must be the next node
     *
     * Also test that if the current node is n2, then n1 must be chosen.
     */
    @Test
    public final void testGetNextNodeId_TwoNodes() {
        final String nodeId1 = "n1";
        final String nodeId2 = "n2";
        final CacheLoader<String> cacheLoader = new DummyCacheLoader( null );
        final NodeIdService cut = new NodeIdService( new NodeAvailabilityCache<String>( 10, 100, cacheLoader ),
                NodeIdList.create( nodeId1, nodeId2 ), null );
        String actual = cut.getAvailableNodeId( nodeId1 );
        // TestNG's assertEquals signature is (actual, expected): pass the
        // computed value first so failure messages label the values correctly.
        assertEquals( actual, nodeId2 );
        /* let's switch nodes, so that the session is bound to node 2
         */
        actual = cut.getAvailableNodeId( nodeId2 );
        assertEquals( actual, nodeId1 );
    }

    /**
     * Test two memcached nodes:
     * - node n2 is the currently used node, which failed
     * - node n1 is also unavailable
     * - the result must be null
     */
    @Test
    public final void testGetNextNodeId_TwoNodes_NoNodeLeft() {
        final String nodeId1 = "n1";
        final String nodeId2 = "n2";
        // n1 is marked unavailable by the loader, and n2 is the failed
        // current node, so no node remains.
        final CacheLoader<String> cacheLoader = new DummyCacheLoader( Arrays.asList( nodeId1 ) );
        final NodeIdService cut = new NodeIdService( new NodeAvailabilityCache<String>( 10, 100, cacheLoader ),
                NodeIdList.create( nodeId1, nodeId2 ), null );
        final String actual = cut.getAvailableNodeId( nodeId2 );
        assertNull( actual );
    }

    /**
     * Test two memcached nodes with no regular nodes left, so that a failover
     * node is chosen
     */
    @Test
    public final void testGetNextNodeId_RegularNode_NoRegularNodeLeft() {
        final String nodeId1 = "n1";
        final String nodeId2 = "n2";
        final NodeIdService cut = new NodeIdService( createNodeAvailabilityCache(),
                NodeIdList.create( nodeId1 ), Arrays.asList( nodeId2 ) );
        final String actual = cut.getAvailableNodeId( nodeId1 );
        assertEquals( actual, nodeId2, "The failover node is not chosen" );
    }

    /**
     * Test two memcached nodes:
     * - with the current node being a failover node
     * - regular nodes present
     *
     * A regular node shall be chosen
     */
    @Test
    public final void testGetNextNodeId_FailoverNode_RegularNodeLeft() {
        final String nodeId1 = "n1";
        final String nodeId2 = "n2";
        final NodeIdService cut = new NodeIdService( createNodeAvailabilityCache(),
                NodeIdList.create( nodeId1 ), Arrays.asList( nodeId2 ) );
        final String actual = cut.getAvailableNodeId( nodeId2 );
        assertEquals( actual, nodeId1, "The regular node is not chosen" );
    }

    /**
     * Test two memcached nodes:
     * - with the current node being a failover node
     * - no regular nodes left
     *
     * no node can be chosen
     */
    @Test
    public final void testGetNextNodeId_FailoverNode_NoRegularNodeLeft() {
        final String nodeId1 = "n1";
        final String nodeId2 = "n2";
        final NodeIdService cut = new NodeIdService( createNodeAvailabilityCache( nodeId1 ),
                NodeIdList.create( nodeId1 ), Arrays.asList( nodeId2 ) );
        final String actual = cut.getAvailableNodeId( nodeId2 );
        assertNull( actual );
    }

    /**
     * Test three memcached nodes:
     * - with the current node being the first failover node
     * - no regular nodes left
     * - another failover node left
     *
     * the second failover node must be chosen
     */
    @Test
    public final void testGetNextNodeId_FailoverNode_NoRegularNodeButAnotherFailoverNodeLeft() {
        final String nodeId1 = "n1";
        final String nodeId2 = "n2";
        final String nodeId3 = "n3";
        final NodeIdService cut = new NodeIdService( createNodeAvailabilityCache( nodeId1 ),
                NodeIdList.create( nodeId1 ), Arrays.asList( nodeId2, nodeId3 ) );
        final String actual = cut.getAvailableNodeId( nodeId2 );
        assertEquals( actual, nodeId3, "The second failover node is not chosen" );
    }

    /**
     * Creates a {@link NodeAvailabilityCache} whose loader reports the given
     * node ids as unavailable (all nodes are available when none are given).
     */
    private NodeAvailabilityCache<String> createNodeAvailabilityCache( final String ... unavailableNodes ) {
        final List<String> unavailable = unavailableNodes != null ? Arrays.asList( unavailableNodes ) : null;
        return new NodeAvailabilityCache<String>( 10, 100, new DummyCacheLoader( unavailable ) );
    }

    /**
     * Cache loader stub: a node is available unless it is contained in the
     * configured "unavailable" list (a null list means everything is up).
     */
    private static final class DummyCacheLoader implements CacheLoader<String> {
        private final List<String> _unavailable;
        private DummyCacheLoader( final List<String> unavailable ) {
            _unavailable = unavailable;
        }
        @Override
        public boolean isNodeAvailable( final String key ) {
            return _unavailable == null || !_unavailable.contains( key );
        }
    }
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/photos/library/v1/photos_library.proto
package com.google.photos.library.v1.proto;
/**
*
*
* <pre>
* Request to create one or more media items in a user's Google Photos library.
* If an `albumId` is specified, the media items are also added to that album.
* `albumPosition` is optional and can only be specified if an `albumId` is set.
* </pre>
*
* Protobuf type {@code google.photos.library.v1.BatchCreateMediaItemsRequest}
*/
public final class BatchCreateMediaItemsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.photos.library.v1.BatchCreateMediaItemsRequest)
BatchCreateMediaItemsRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use BatchCreateMediaItemsRequest.newBuilder() to construct.
private BatchCreateMediaItemsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// Default-value constructor: empty album id, immutable empty item list.
private BatchCreateMediaItemsRequest() {
albumId_ = "";
newMediaItems_ = java.util.Collections.emptyList();
}
// Used by the protobuf runtime to create instances reflectively.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new BatchCreateMediaItemsRequest();
}
// Fields that were present on the wire but not known to this schema.
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor: reads tagged fields until end of input,
// preserving unrecognized fields, then freezes the repeated list.
private BatchCreateMediaItemsRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
// Tag 0 signals end of the stream.
case 0:
done = true;
break;
// Tag 10 = field 1 (album_id), length-delimited UTF-8 string.
case 10:
{
java.lang.String s = input.readStringRequireUtf8();
albumId_ = s;
break;
}
// Tag 18 = field 2 (new_media_items), repeated message. The list is
// created lazily on the first element; bit 0 of mutable_bitField0_
// records that the list is mutable and must be frozen in finally.
case 18:
{
if (!((mutable_bitField0_ & 0x00000001) != 0)) {
newMediaItems_ =
new java.util.ArrayList<com.google.photos.library.v1.proto.NewMediaItem>();
mutable_bitField0_ |= 0x00000001;
}
newMediaItems_.add(
input.readMessage(
com.google.photos.library.v1.proto.NewMediaItem.parser(), extensionRegistry));
break;
}
// Tag 34 = field 4 (album_position), singular message; if it already
// has a value, merge the new occurrence into it.
case 34:
{
com.google.photos.library.v1.proto.AlbumPosition.Builder subBuilder = null;
if (albumPosition_ != null) {
subBuilder = albumPosition_.toBuilder();
}
albumPosition_ =
input.readMessage(
com.google.photos.library.v1.proto.AlbumPosition.parser(), extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(albumPosition_);
albumPosition_ = subBuilder.buildPartial();
}
break;
}
// Anything else is stashed in unknownFields; a false return means the
// tag could not be parsed and input has ended.
default:
{
if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
} finally {
// Freeze the repeated field (if it was populated) and the unknown set,
// even when parsing failed part-way through.
if (((mutable_bitField0_ & 0x00000001) != 0)) {
newMediaItems_ = java.util.Collections.unmodifiableList(newMediaItems_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Protobuf descriptor for this message type.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.photos.library.v1.proto.LibraryServiceProto
.internal_static_google_photos_library_v1_BatchCreateMediaItemsRequest_descriptor;
}
// Table mapping descriptor fields to the generated accessors (used by the
// runtime's reflective field access).
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.photos.library.v1.proto.LibraryServiceProto
.internal_static_google_photos_library_v1_BatchCreateMediaItemsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.photos.library.v1.proto.BatchCreateMediaItemsRequest.class,
com.google.photos.library.v1.proto.BatchCreateMediaItemsRequest.Builder.class);
}
public static final int ALBUM_ID_FIELD_NUMBER = 1;
// Holds either a String or a ByteString; decoded lazily and cached on
// first String access (volatile so the cached value publishes safely).
private volatile java.lang.Object albumId_;
/**
 *
 *
 * <pre>
 * Identifier of the album where the media items are added. The media items
 * are also added to the user's library. This is an optional field.
 * </pre>
 *
 * <code>string album_id = 1;</code>
 *
 * @return The albumId.
 */
@java.lang.Override
public java.lang.String getAlbumId() {
java.lang.Object ref = albumId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
// First String access after parsing: decode the UTF-8 bytes and cache.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
albumId_ = s;
return s;
}
}
/**
 *
 *
 * <pre>
 * Identifier of the album where the media items are added. The media items
 * are also added to the user's library. This is an optional field.
 * </pre>
 *
 * <code>string album_id = 1;</code>
 *
 * @return The bytes for albumId.
 */
@java.lang.Override
public com.google.protobuf.ByteString getAlbumIdBytes() {
java.lang.Object ref = albumId_;
if (ref instanceof java.lang.String) {
// Mirror of getAlbumId(): encode and cache the ByteString form.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
albumId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int NEW_MEDIA_ITEMS_FIELD_NUMBER = 2;
// Immutable after construction (frozen in the parsing ctor / buildPartial).
private java.util.List<com.google.photos.library.v1.proto.NewMediaItem> newMediaItems_;
/**
 *
 *
 * <pre>
 * Required. List of media items to be created.
 * </pre>
 *
 * <code>
 * repeated .google.photos.library.v1.NewMediaItem new_media_items = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
@java.lang.Override
public java.util.List<com.google.photos.library.v1.proto.NewMediaItem> getNewMediaItemsList() {
return newMediaItems_;
}
/**
 *
 *
 * <pre>
 * Required. List of media items to be created.
 * </pre>
 *
 * <code>
 * repeated .google.photos.library.v1.NewMediaItem new_media_items = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
@java.lang.Override
public java.util.List<? extends com.google.photos.library.v1.proto.NewMediaItemOrBuilder>
getNewMediaItemsOrBuilderList() {
return newMediaItems_;
}
/**
 *
 *
 * <pre>
 * Required. List of media items to be created.
 * </pre>
 *
 * <code>
 * repeated .google.photos.library.v1.NewMediaItem new_media_items = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
@java.lang.Override
public int getNewMediaItemsCount() {
return newMediaItems_.size();
}
/**
 *
 *
 * <pre>
 * Required. List of media items to be created.
 * </pre>
 *
 * <code>
 * repeated .google.photos.library.v1.NewMediaItem new_media_items = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
@java.lang.Override
public com.google.photos.library.v1.proto.NewMediaItem getNewMediaItems(int index) {
return newMediaItems_.get(index);
}
/**
 *
 *
 * <pre>
 * Required. List of media items to be created.
 * </pre>
 *
 * <code>
 * repeated .google.photos.library.v1.NewMediaItem new_media_items = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
@java.lang.Override
public com.google.photos.library.v1.proto.NewMediaItemOrBuilder getNewMediaItemsOrBuilder(
int index) {
return newMediaItems_.get(index);
}
public static final int ALBUM_POSITION_FIELD_NUMBER = 4;
// null means "field not set"; getters substitute the default instance.
private com.google.photos.library.v1.proto.AlbumPosition albumPosition_;
/**
 *
 *
 * <pre>
 * Position in the album where the media items are added. If not
 * specified, the media items are added to the end of the album (as per
 * the default value, that is, `LAST_IN_ALBUM`). The request fails if this
 * field is set and the `albumId` is not specified. The request will also fail
 * if you set the field and are not the owner of the shared album.
 * </pre>
 *
 * <code>.google.photos.library.v1.AlbumPosition album_position = 4;</code>
 *
 * @return Whether the albumPosition field is set.
 */
@java.lang.Override
public boolean hasAlbumPosition() {
return albumPosition_ != null;
}
/**
 *
 *
 * <pre>
 * Position in the album where the media items are added. If not
 * specified, the media items are added to the end of the album (as per
 * the default value, that is, `LAST_IN_ALBUM`). The request fails if this
 * field is set and the `albumId` is not specified. The request will also fail
 * if you set the field and are not the owner of the shared album.
 * </pre>
 *
 * <code>.google.photos.library.v1.AlbumPosition album_position = 4;</code>
 *
 * @return The albumPosition.
 */
@java.lang.Override
public com.google.photos.library.v1.proto.AlbumPosition getAlbumPosition() {
// Never returns null: unset maps to the default instance.
return albumPosition_ == null
? com.google.photos.library.v1.proto.AlbumPosition.getDefaultInstance()
: albumPosition_;
}
/**
 *
 *
 * <pre>
 * Position in the album where the media items are added. If not
 * specified, the media items are added to the end of the album (as per
 * the default value, that is, `LAST_IN_ALBUM`). The request fails if this
 * field is set and the `albumId` is not specified. The request will also fail
 * if you set the field and are not the owner of the shared album.
 * </pre>
 *
 * <code>.google.photos.library.v1.AlbumPosition album_position = 4;</code>
 */
@java.lang.Override
public com.google.photos.library.v1.proto.AlbumPositionOrBuilder getAlbumPositionOrBuilder() {
return getAlbumPosition();
}
// Memoized initialization state: -1 unknown, 0 false, 1 true.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// No required proto2-style fields to check, so always initialized.
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
// Default-valued fields are omitted from the wire, per proto3 semantics.
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(albumId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, albumId_);
}
for (int i = 0; i < newMediaItems_.size(); i++) {
output.writeMessage(2, newMediaItems_.get(i));
}
if (albumPosition_ != null) {
output.writeMessage(4, getAlbumPosition());
}
// Round-trip any fields this schema version did not recognize.
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
// memoizedSize of -1 means "not yet computed"; the message is immutable,
// so the computed size is cached.
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(albumId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, albumId_);
}
for (int i = 0; i < newMediaItems_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, newMediaItems_.get(i));
}
if (albumPosition_ != null) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(4, getAlbumPosition());
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
// Field-by-field structural equality, including unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.photos.library.v1.proto.BatchCreateMediaItemsRequest)) {
return super.equals(obj);
}
com.google.photos.library.v1.proto.BatchCreateMediaItemsRequest other =
(com.google.photos.library.v1.proto.BatchCreateMediaItemsRequest) obj;
if (!getAlbumId().equals(other.getAlbumId())) return false;
if (!getNewMediaItemsList().equals(other.getNewMediaItemsList())) return false;
// Presence must match before comparing the message value itself.
if (hasAlbumPosition() != other.hasAlbumPosition()) return false;
if (hasAlbumPosition()) {
if (!getAlbumPosition().equals(other.getAlbumPosition())) return false;
}
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
// Hash is memoized (the message is immutable) and mixes each set field's
// number with its value hash, consistent with equals().
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + ALBUM_ID_FIELD_NUMBER;
hash = (53 * hash) + getAlbumId().hashCode();
if (getNewMediaItemsCount() > 0) {
hash = (37 * hash) + NEW_MEDIA_ITEMS_FIELD_NUMBER;
hash = (53 * hash) + getNewMediaItemsList().hashCode();
}
if (hasAlbumPosition()) {
hash = (37 * hash) + ALBUM_POSITION_FIELD_NUMBER;
hash = (53 * hash) + getAlbumPosition().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// Static parsing entry points for the common input sources (ByteBuffer,
// ByteString, byte[], InputStream, CodedInputStream), each with and without
// an extension registry; all delegate to PARSER / GeneratedMessageV3 helpers.
public static com.google.photos.library.v1.proto.BatchCreateMediaItemsRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.photos.library.v1.proto.BatchCreateMediaItemsRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.photos.library.v1.proto.BatchCreateMediaItemsRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.photos.library.v1.proto.BatchCreateMediaItemsRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.photos.library.v1.proto.BatchCreateMediaItemsRequest parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.photos.library.v1.proto.BatchCreateMediaItemsRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.photos.library.v1.proto.BatchCreateMediaItemsRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.photos.library.v1.proto.BatchCreateMediaItemsRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Delimited variants read a length prefix first, for streamed messages.
public static com.google.photos.library.v1.proto.BatchCreateMediaItemsRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.photos.library.v1.proto.BatchCreateMediaItemsRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.photos.library.v1.proto.BatchCreateMediaItemsRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.photos.library.v1.proto.BatchCreateMediaItemsRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
// Fresh builder with all fields at defaults.
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
// Builder pre-populated with the prototype's field values.
public static Builder newBuilder(
com.google.photos.library.v1.proto.BatchCreateMediaItemsRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
// The default instance yields an empty builder without a redundant merge.
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request to create one or more media items in a user's Google Photos library.
* If an `albumId` is specified, the media items are also added to that album.
* `albumPosition` is optional and can only be specified if an `albumId` is set.
* </pre>
*
* Protobuf type {@code google.photos.library.v1.BatchCreateMediaItemsRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.photos.library.v1.BatchCreateMediaItemsRequest)
com.google.photos.library.v1.proto.BatchCreateMediaItemsRequestOrBuilder {
// Descriptor for the message type this builder produces.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.photos.library.v1.proto.LibraryServiceProto
.internal_static_google_photos_library_v1_BatchCreateMediaItemsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.photos.library.v1.proto.LibraryServiceProto
.internal_static_google_photos_library_v1_BatchCreateMediaItemsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.photos.library.v1.proto.BatchCreateMediaItemsRequest.class,
com.google.photos.library.v1.proto.BatchCreateMediaItemsRequest.Builder.class);
}
// Construct using com.google.photos.library.v1.proto.BatchCreateMediaItemsRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates nested field builders when the runtime requires it.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getNewMediaItemsFieldBuilder();
}
}
// Resets every field to its default value.
@java.lang.Override
public Builder clear() {
super.clear();
albumId_ = "";
if (newMediaItemsBuilder_ == null) {
// Inline-list mode: drop the list and its "mutable" bit.
newMediaItems_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
} else {
newMediaItemsBuilder_.clear();
}
if (albumPositionBuilder_ == null) {
albumPosition_ = null;
} else {
albumPosition_ = null;
albumPositionBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.photos.library.v1.proto.LibraryServiceProto
.internal_static_google_photos_library_v1_BatchCreateMediaItemsRequest_descriptor;
}
@java.lang.Override
public com.google.photos.library.v1.proto.BatchCreateMediaItemsRequest
getDefaultInstanceForType() {
return com.google.photos.library.v1.proto.BatchCreateMediaItemsRequest.getDefaultInstance();
}
// Builds and verifies initialization (throws on an uninitialized result).
@java.lang.Override
public com.google.photos.library.v1.proto.BatchCreateMediaItemsRequest build() {
com.google.photos.library.v1.proto.BatchCreateMediaItemsRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copies the builder state into a new message without initialization checks.
@java.lang.Override
public com.google.photos.library.v1.proto.BatchCreateMediaItemsRequest buildPartial() {
com.google.photos.library.v1.proto.BatchCreateMediaItemsRequest result =
new com.google.photos.library.v1.proto.BatchCreateMediaItemsRequest(this);
// Captured but unused for this message (generated boilerplate).
int from_bitField0_ = bitField0_;
result.albumId_ = albumId_;
if (newMediaItemsBuilder_ == null) {
// Inline-list mode: freeze the list and hand ownership to the message;
// clearing the bit means future builder mutations must copy first.
if (((bitField0_ & 0x00000001) != 0)) {
newMediaItems_ = java.util.Collections.unmodifiableList(newMediaItems_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.newMediaItems_ = newMediaItems_;
} else {
result.newMediaItems_ = newMediaItemsBuilder_.build();
}
if (albumPositionBuilder_ == null) {
result.albumPosition_ = albumPosition_;
} else {
result.albumPosition_ = albumPositionBuilder_.build();
}
onBuilt();
return result;
}
// Generated pass-throughs to the GeneratedMessageV3.Builder implementations
// (kept so the runtime's reflective field access works on this builder).
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
// Dynamic dispatch: use the typed merge when possible, else the generic one.
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.photos.library.v1.proto.BatchCreateMediaItemsRequest) {
return mergeFrom((com.google.photos.library.v1.proto.BatchCreateMediaItemsRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Merges set fields of `other` into this builder; defaults are left alone.
public Builder mergeFrom(
com.google.photos.library.v1.proto.BatchCreateMediaItemsRequest other) {
if (other
== com.google.photos.library.v1.proto.BatchCreateMediaItemsRequest.getDefaultInstance())
return this;
if (!other.getAlbumId().isEmpty()) {
albumId_ = other.albumId_;
onChanged();
}
if (newMediaItemsBuilder_ == null) {
if (!other.newMediaItems_.isEmpty()) {
if (newMediaItems_.isEmpty()) {
// Adopt the other message's (immutable) list; the cleared bit forces
// a copy before any later mutation.
newMediaItems_ = other.newMediaItems_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureNewMediaItemsIsMutable();
newMediaItems_.addAll(other.newMediaItems_);
}
onChanged();
}
} else {
if (!other.newMediaItems_.isEmpty()) {
if (newMediaItemsBuilder_.isEmpty()) {
// Empty nested builder: cheaper to drop it, adopt the list directly,
// and recreate the builder only if the runtime demands it.
newMediaItemsBuilder_.dispose();
newMediaItemsBuilder_ = null;
newMediaItems_ = other.newMediaItems_;
bitField0_ = (bitField0_ & ~0x00000001);
newMediaItemsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getNewMediaItemsFieldBuilder()
: null;
} else {
newMediaItemsBuilder_.addAllMessages(other.newMediaItems_);
}
}
}
if (other.hasAlbumPosition()) {
mergeAlbumPosition(other.getAlbumPosition());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
// No required proto2-style fields, so a builder is always buildable.
return true;
}
// Parses a message from the stream and merges it into this builder; on a
// parse failure, whatever was read successfully is still merged.
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.photos.library.v1.proto.BatchCreateMediaItemsRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage =
(com.google.photos.library.v1.proto.BatchCreateMediaItemsRequest)
e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Bit 0 tracks whether newMediaItems_ is a private mutable copy.
private int bitField0_;
// String or ByteString, lazily converted, mirroring the message field.
private java.lang.Object albumId_ = "";
/**
 *
 *
 * <pre>
 * Identifier of the album where the media items are added. The media items
 * are also added to the user's library. This is an optional field.
 * </pre>
 *
 * <code>string album_id = 1;</code>
 *
 * @return The albumId.
 */
public java.lang.String getAlbumId() {
java.lang.Object ref = albumId_;
if (!(ref instanceof java.lang.String)) {
// Decode the ByteString form once and cache the String.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
albumId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 *
 *
 * <pre>
 * Identifier of the album where the media items are added. The media items
 * are also added to the user's library. This is an optional field.
 * </pre>
 *
 * <code>string album_id = 1;</code>
 *
 * @return The bytes for albumId.
 */
public com.google.protobuf.ByteString getAlbumIdBytes() {
java.lang.Object ref = albumId_;
if (ref instanceof String) {
// Encode the String form once and cache the ByteString.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
albumId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 *
 *
 * <pre>
 * Identifier of the album where the media items are added. The media items
 * are also added to the user's library. This is an optional field.
 * </pre>
 *
 * <code>string album_id = 1;</code>
 *
 * @param value The albumId to set.
 * @return This builder for chaining.
 */
public Builder setAlbumId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
albumId_ = value;
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Identifier of the album where the media items are added. The media items
 * are also added to the user's library. This is an optional field.
 * </pre>
 *
 * <code>string album_id = 1;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearAlbumId() {
// Reset to the default (empty) value.
albumId_ = getDefaultInstance().getAlbumId();
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Identifier of the album where the media items are added. The media items
 * are also added to the user's library. This is an optional field.
 * </pre>
 *
 * <code>string album_id = 1;</code>
 *
 * @param value The bytes for albumId to set.
 * @return This builder for chaining.
 */
public Builder setAlbumIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// Reject bytes that are not valid UTF-8 up front.
checkByteStringIsUtf8(value);
albumId_ = value;
onChanged();
return this;
}
// ---------------------------------------------------------------------------
// repeated .google.photos.library.v1.NewMediaItem new_media_items = 2
// [(.google.api.field_behavior) = REQUIRED]
//
// Standard protoc-generated repeated-message plumbing: the field lives either
// in the plain list (newMediaItems_) or, once a builder view has been
// requested, in the RepeatedFieldBuilderV3 (newMediaItemsBuilder_). Exactly
// one of the two is authoritative at any time; every accessor branches on
// whether the field builder has been materialized.
// ---------------------------------------------------------------------------
private java.util.List<com.google.photos.library.v1.proto.NewMediaItem> newMediaItems_ =
java.util.Collections.emptyList();
// Copy-on-write guard: replaces the shared immutable list with a private
// ArrayList the first time this builder mutates the field (tracked via bit 0).
private void ensureNewMediaItemsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
newMediaItems_ =
new java.util.ArrayList<com.google.photos.library.v1.proto.NewMediaItem>(
newMediaItems_);
bitField0_ |= 0x00000001;
}
}
// Lazily created builder view over the repeated field; null until
// getNewMediaItemsFieldBuilder() is first called.
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.photos.library.v1.proto.NewMediaItem,
com.google.photos.library.v1.proto.NewMediaItem.Builder,
com.google.photos.library.v1.proto.NewMediaItemOrBuilder>
newMediaItemsBuilder_;
/**
 * Required. List of media items to be created.
 *
 * @return an unmodifiable view of the current items.
 */
public java.util.List<com.google.photos.library.v1.proto.NewMediaItem> getNewMediaItemsList() {
if (newMediaItemsBuilder_ == null) {
return java.util.Collections.unmodifiableList(newMediaItems_);
} else {
return newMediaItemsBuilder_.getMessageList();
}
}
/** @return the number of new_media_items entries. */
public int getNewMediaItemsCount() {
if (newMediaItemsBuilder_ == null) {
return newMediaItems_.size();
} else {
return newMediaItemsBuilder_.getCount();
}
}
/** @return the item at {@code index}. */
public com.google.photos.library.v1.proto.NewMediaItem getNewMediaItems(int index) {
if (newMediaItemsBuilder_ == null) {
return newMediaItems_.get(index);
} else {
return newMediaItemsBuilder_.getMessage(index);
}
}
/** Replaces the item at {@code index}; null values are rejected. */
public Builder setNewMediaItems(
int index, com.google.photos.library.v1.proto.NewMediaItem value) {
if (newMediaItemsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureNewMediaItemsIsMutable();
newMediaItems_.set(index, value);
onChanged();
} else {
newMediaItemsBuilder_.setMessage(index, value);
}
return this;
}
/** Replaces the item at {@code index} with the built form of the given builder. */
public Builder setNewMediaItems(
int index, com.google.photos.library.v1.proto.NewMediaItem.Builder builderForValue) {
if (newMediaItemsBuilder_ == null) {
ensureNewMediaItemsIsMutable();
newMediaItems_.set(index, builderForValue.build());
onChanged();
} else {
newMediaItemsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/** Appends one item; null values are rejected. */
public Builder addNewMediaItems(com.google.photos.library.v1.proto.NewMediaItem value) {
if (newMediaItemsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureNewMediaItemsIsMutable();
newMediaItems_.add(value);
onChanged();
} else {
newMediaItemsBuilder_.addMessage(value);
}
return this;
}
/** Inserts one item at {@code index}; null values are rejected. */
public Builder addNewMediaItems(
int index, com.google.photos.library.v1.proto.NewMediaItem value) {
if (newMediaItemsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureNewMediaItemsIsMutable();
newMediaItems_.add(index, value);
onChanged();
} else {
newMediaItemsBuilder_.addMessage(index, value);
}
return this;
}
/** Appends the built form of the given builder. */
public Builder addNewMediaItems(
com.google.photos.library.v1.proto.NewMediaItem.Builder builderForValue) {
if (newMediaItemsBuilder_ == null) {
ensureNewMediaItemsIsMutable();
newMediaItems_.add(builderForValue.build());
onChanged();
} else {
newMediaItemsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/** Inserts the built form of the given builder at {@code index}. */
public Builder addNewMediaItems(
int index, com.google.photos.library.v1.proto.NewMediaItem.Builder builderForValue) {
if (newMediaItemsBuilder_ == null) {
ensureNewMediaItemsIsMutable();
newMediaItems_.add(index, builderForValue.build());
onChanged();
} else {
newMediaItemsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/** Appends every item from {@code values}. */
public Builder addAllNewMediaItems(
java.lang.Iterable<? extends com.google.photos.library.v1.proto.NewMediaItem> values) {
if (newMediaItemsBuilder_ == null) {
ensureNewMediaItemsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, newMediaItems_);
onChanged();
} else {
newMediaItemsBuilder_.addAllMessages(values);
}
return this;
}
/** Removes all items and resets the mutability bit. */
public Builder clearNewMediaItems() {
if (newMediaItemsBuilder_ == null) {
newMediaItems_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
newMediaItemsBuilder_.clear();
}
return this;
}
/** Removes the item at {@code index}. */
public Builder removeNewMediaItems(int index) {
if (newMediaItemsBuilder_ == null) {
ensureNewMediaItemsIsMutable();
newMediaItems_.remove(index);
onChanged();
} else {
newMediaItemsBuilder_.remove(index);
}
return this;
}
/**
 * Returns a mutable builder for the item at {@code index}; forces creation of
 * the field builder (subsequent mutations route through it).
 */
public com.google.photos.library.v1.proto.NewMediaItem.Builder getNewMediaItemsBuilder(
int index) {
return getNewMediaItemsFieldBuilder().getBuilder(index);
}
/** Read-only view of the item (or its in-progress builder) at {@code index}. */
public com.google.photos.library.v1.proto.NewMediaItemOrBuilder getNewMediaItemsOrBuilder(
int index) {
if (newMediaItemsBuilder_ == null) {
return newMediaItems_.get(index);
} else {
return newMediaItemsBuilder_.getMessageOrBuilder(index);
}
}
/** Read-only view of all items (or their in-progress builders). */
public java.util.List<? extends com.google.photos.library.v1.proto.NewMediaItemOrBuilder>
getNewMediaItemsOrBuilderList() {
if (newMediaItemsBuilder_ != null) {
return newMediaItemsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(newMediaItems_);
}
}
/** Appends a new default-valued item and returns its builder for in-place editing. */
public com.google.photos.library.v1.proto.NewMediaItem.Builder addNewMediaItemsBuilder() {
return getNewMediaItemsFieldBuilder()
.addBuilder(com.google.photos.library.v1.proto.NewMediaItem.getDefaultInstance());
}
/** Inserts a new default-valued item at {@code index} and returns its builder. */
public com.google.photos.library.v1.proto.NewMediaItem.Builder addNewMediaItemsBuilder(
int index) {
return getNewMediaItemsFieldBuilder()
.addBuilder(index, com.google.photos.library.v1.proto.NewMediaItem.getDefaultInstance());
}
/** Builders for all items; forces creation of the field builder. */
public java.util.List<com.google.photos.library.v1.proto.NewMediaItem.Builder>
getNewMediaItemsBuilderList() {
return getNewMediaItemsFieldBuilder().getBuilderList();
}
// Materializes the RepeatedFieldBuilderV3 on first use and hands ownership of
// the backing list to it (newMediaItems_ is nulled out afterwards).
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.photos.library.v1.proto.NewMediaItem,
com.google.photos.library.v1.proto.NewMediaItem.Builder,
com.google.photos.library.v1.proto.NewMediaItemOrBuilder>
getNewMediaItemsFieldBuilder() {
if (newMediaItemsBuilder_ == null) {
newMediaItemsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.photos.library.v1.proto.NewMediaItem,
com.google.photos.library.v1.proto.NewMediaItem.Builder,
com.google.photos.library.v1.proto.NewMediaItemOrBuilder>(
newMediaItems_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
newMediaItems_ = null;
}
return newMediaItemsBuilder_;
}
// ---------------------------------------------------------------------------
// .google.photos.library.v1.AlbumPosition album_position = 4
//
// Position in the album where the media items are added. If unset, items go
// to the end of the album (LAST_IN_ALBUM). The request fails if this field is
// set without albumId, or if the caller does not own the shared album.
//
// Single-message field: held either directly in albumPosition_ or, once a
// builder view is requested, in the SingleFieldBuilderV3.
// ---------------------------------------------------------------------------
private com.google.photos.library.v1.proto.AlbumPosition albumPosition_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.photos.library.v1.proto.AlbumPosition,
com.google.photos.library.v1.proto.AlbumPosition.Builder,
com.google.photos.library.v1.proto.AlbumPositionOrBuilder>
albumPositionBuilder_;
/**
 * @return Whether the albumPosition field is set (in either storage form).
 */
public boolean hasAlbumPosition() {
return albumPositionBuilder_ != null || albumPosition_ != null;
}
/**
 * @return The albumPosition, or the default instance when unset (never null).
 */
public com.google.photos.library.v1.proto.AlbumPosition getAlbumPosition() {
if (albumPositionBuilder_ == null) {
return albumPosition_ == null
? com.google.photos.library.v1.proto.AlbumPosition.getDefaultInstance()
: albumPosition_;
} else {
return albumPositionBuilder_.getMessage();
}
}
/** Sets album_position; null values are rejected. */
public Builder setAlbumPosition(com.google.photos.library.v1.proto.AlbumPosition value) {
if (albumPositionBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
albumPosition_ = value;
onChanged();
} else {
albumPositionBuilder_.setMessage(value);
}
return this;
}
/** Sets album_position from the built form of the given builder. */
public Builder setAlbumPosition(
com.google.photos.library.v1.proto.AlbumPosition.Builder builderForValue) {
if (albumPositionBuilder_ == null) {
albumPosition_ = builderForValue.build();
onChanged();
} else {
albumPositionBuilder_.setMessage(builderForValue.build());
}
return this;
}
/**
 * Merges {@code value} into the current album_position (protobuf merge
 * semantics: set fields of {@code value} overwrite); plain assignment if the
 * field was previously unset.
 */
public Builder mergeAlbumPosition(com.google.photos.library.v1.proto.AlbumPosition value) {
if (albumPositionBuilder_ == null) {
if (albumPosition_ != null) {
albumPosition_ =
com.google.photos.library.v1.proto.AlbumPosition.newBuilder(albumPosition_)
.mergeFrom(value)
.buildPartial();
} else {
albumPosition_ = value;
}
onChanged();
} else {
albumPositionBuilder_.mergeFrom(value);
}
return this;
}
/** Clears album_position, dropping any materialized field builder as well. */
public Builder clearAlbumPosition() {
if (albumPositionBuilder_ == null) {
albumPosition_ = null;
onChanged();
} else {
albumPosition_ = null;
albumPositionBuilder_ = null;
}
return this;
}
/** Mutable builder for album_position; forces creation of the field builder. */
public com.google.photos.library.v1.proto.AlbumPosition.Builder getAlbumPositionBuilder() {
onChanged();
return getAlbumPositionFieldBuilder().getBuilder();
}
/** Read-only view of album_position (message or in-progress builder). */
public com.google.photos.library.v1.proto.AlbumPositionOrBuilder getAlbumPositionOrBuilder() {
if (albumPositionBuilder_ != null) {
return albumPositionBuilder_.getMessageOrBuilder();
} else {
return albumPosition_ == null
? com.google.photos.library.v1.proto.AlbumPosition.getDefaultInstance()
: albumPosition_;
}
}
// Materializes the SingleFieldBuilderV3 on first use; ownership of the
// current value moves into it and albumPosition_ is nulled out.
private com.google.protobuf.SingleFieldBuilderV3<
com.google.photos.library.v1.proto.AlbumPosition,
com.google.photos.library.v1.proto.AlbumPosition.Builder,
com.google.photos.library.v1.proto.AlbumPositionOrBuilder>
getAlbumPositionFieldBuilder() {
if (albumPositionBuilder_ == null) {
albumPositionBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.photos.library.v1.proto.AlbumPosition,
com.google.photos.library.v1.proto.AlbumPosition.Builder,
com.google.photos.library.v1.proto.AlbumPositionOrBuilder>(
getAlbumPosition(), getParentForChildren(), isClean());
albumPosition_ = null;
}
return albumPositionBuilder_;
}
// Final overrides delegating unknown-field handling to the generated
// superclass; marked final so subclass builders cannot alter the behavior.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.photos.library.v1.BatchCreateMediaItemsRequest)
}
// @@protoc_insertion_point(class_scope:google.photos.library.v1.BatchCreateMediaItemsRequest)
// Singleton default instance shared by all readers of this message type.
private static final com.google.photos.library.v1.proto.BatchCreateMediaItemsRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.photos.library.v1.proto.BatchCreateMediaItemsRequest();
}
/** @return the shared immutable default instance of this message. */
public static com.google.photos.library.v1.proto.BatchCreateMediaItemsRequest
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Wire-format parser; delegates to the parsing constructor of the message.
private static final com.google.protobuf.Parser<BatchCreateMediaItemsRequest> PARSER =
new com.google.protobuf.AbstractParser<BatchCreateMediaItemsRequest>() {
@java.lang.Override
public BatchCreateMediaItemsRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new BatchCreateMediaItemsRequest(input, extensionRegistry);
}
};
/** @return the shared parser for this message type. */
public static com.google.protobuf.Parser<BatchCreateMediaItemsRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<BatchCreateMediaItemsRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.photos.library.v1.proto.BatchCreateMediaItemsRequest
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.operators.testutils;
import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.functions.Function;
import org.apache.flink.api.common.functions.util.FunctionUtils;
import org.apache.flink.api.common.typeutils.TypeComparator;
import org.apache.flink.api.common.typeutils.TypeSerializerFactory;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.io.disk.iomanager.IOManager;
import org.apache.flink.runtime.io.disk.iomanager.IOManagerAsync;
import org.apache.flink.runtime.jobgraph.tasks.AbstractInvokable;
import org.apache.flink.runtime.memory.MemoryManager;
import org.apache.flink.runtime.metrics.groups.OperatorMetricGroup;
import org.apache.flink.runtime.operators.Driver;
import org.apache.flink.runtime.operators.ResettableDriver;
import org.apache.flink.runtime.operators.TaskContext;
import org.apache.flink.runtime.operators.sort.UnilateralSortMerger;
import org.apache.flink.runtime.operators.util.TaskConfig;
import org.apache.flink.runtime.taskmanager.TaskManagerRuntimeInfo;
import org.apache.flink.runtime.testutils.recordutils.RecordComparator;
import org.apache.flink.runtime.testutils.recordutils.RecordSerializerFactory;
import org.apache.flink.runtime.util.TestingTaskManagerRuntimeInfo;
import org.apache.flink.types.Record;
import org.apache.flink.util.Collector;
import org.apache.flink.util.MutableObjectIterator;
import org.apache.flink.util.TestLogger;
import org.junit.After;
import org.junit.Assert;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
@RunWith(Parameterized.class)
public class DriverTestBase<S extends Function> extends TestLogger implements TaskContext<S, Record> {

/** Default memory dedicated to each sorter, in bytes (16 MiB). */
protected static final long DEFAULT_PER_SORT_MEM = 16 * 1024 * 1024;

/** Memory page size handed to the memory manager. */
protected static final int PAGE_SIZE = 32 * 1024;

private final IOManager ioManager;

// Null when the test was configured with zero total memory.
private final MemoryManager memManager;

// inputs and sorters are parallel lists: for a sorted input, the slot in
// `inputs` is null until the sorter's iterator is fetched lazily in getInput().
private final List<MutableObjectIterator<Record>> inputs;

private final List<TypeComparator<Record>> comparators;

private final List<UnilateralSortMerger<Record>> sorters;

private final AbstractInvokable owner;

private final TaskConfig taskConfig;

private final TaskManagerRuntimeInfo taskManageInfo;

/** Memory per sorter, in bytes. */
protected final long perSortMem;

/** Memory per sorter as a fraction of the total managed memory. */
protected final double perSortFractionMem;

private Collector<Record> output;

protected int numFileHandles;

private S stub;

private Driver<S, Record> driver;

// Cleared by cancel(); checked between lifecycle phases in testDriverInternal().
private volatile boolean running = true;

private ExecutionConfig executionConfig;

protected DriverTestBase(ExecutionConfig executionConfig, long memory, int maxNumSorters) {
this(executionConfig, memory, maxNumSorters, DEFAULT_PER_SORT_MEM);
}

/**
 * Creates the test harness.
 *
 * @param executionConfig execution config (object reuse on/off) under test
 * @param memory base managed memory in bytes (0 for none)
 * @param maxNumSorters maximum number of sorted inputs that will be added
 * @param perSortMemory memory budget per sorter in bytes
 */
protected DriverTestBase(ExecutionConfig executionConfig, long memory, int maxNumSorters, long perSortMemory) {
if (memory < 0 || maxNumSorters < 0 || perSortMemory < 0) {
throw new IllegalArgumentException();
}
final long totalMem = Math.max(memory, 0) + (Math.max(maxNumSorters, 0) * perSortMemory);
this.perSortMem = perSortMemory;
this.perSortFractionMem = (double)perSortMemory/totalMem;
this.ioManager = new IOManagerAsync();
this.memManager = totalMem > 0 ? new MemoryManager(totalMem,1) : null;
this.inputs = new ArrayList<MutableObjectIterator<Record>>();
this.comparators = new ArrayList<TypeComparator<Record>>();
this.sorters = new ArrayList<UnilateralSortMerger<Record>>();
this.owner = new DummyInvokable();
this.taskConfig = new TaskConfig(new Configuration());
this.executionConfig = executionConfig;
this.taskManageInfo = new TestingTaskManagerRuntimeInfo();
}

/** Runs every test twice: with and without object reuse enabled. */
@Parameterized.Parameters
public static Collection<Object[]> getConfigurations() {
LinkedList<Object[]> configs = new LinkedList<Object[]>();
ExecutionConfig withReuse = new ExecutionConfig();
withReuse.enableObjectReuse();
ExecutionConfig withoutReuse = new ExecutionConfig();
withoutReuse.disableObjectReuse();
Object[] a = { withoutReuse };
configs.add(a);
Object[] b = { withReuse };
configs.add(b);
return configs;
}

/** Adds an unsorted input; the matching sorter slot stays null. */
public void addInput(MutableObjectIterator<Record> input) {
this.inputs.add(input);
this.sorters.add(null);
}

/** Adds an input that is sorted through a UnilateralSortMerger before use. */
public void addInputSorted(MutableObjectIterator<Record> input, RecordComparator comp) throws Exception {
UnilateralSortMerger<Record> sorter = new UnilateralSortMerger<Record>(
this.memManager, this.ioManager, input, this.owner, RecordSerializerFactory.get(), comp,
this.perSortFractionMem, 32, 0.8f, true /*use large record handler*/, true);
this.sorters.add(sorter);
this.inputs.add(null);
}

public void addDriverComparator(RecordComparator comparator) {
this.comparators.add(comparator);
}

public void setOutput(Collector<Record> output) {
this.output = output;
}

/** Convenience overload: collect emitted records (as copies) into the given list. */
public void setOutput(List<Record> output) {
this.output = new ListOutputCollector(output);
}

public int getNumFileHandlesForSort() {
return numFileHandles;
}

public void setNumFileHandlesForSort(int numFileHandles) {
this.numFileHandles = numFileHandles;
}

@SuppressWarnings("rawtypes")
public void testDriver(Driver driver, Class stubClass) throws Exception {
testDriverInternal(driver, stubClass);
}

/**
 * Runs the full driver lifecycle (setup, prepare, open, run, close, cleanup),
 * mirroring the error-handling order of the real BatchTask: a close() failure
 * during regular shutdown fails the task, while failures during error
 * cleanup are suppressed in favor of the root cause.
 */
@SuppressWarnings({"unchecked","rawtypes"})
public void testDriverInternal(Driver driver, Class stubClass) throws Exception {
this.driver = driver;
driver.setup(this);
this.stub = (S)stubClass.newInstance();
// regular running logic
boolean stubOpen = false;
try {
// run the data preparation
try {
driver.prepare();
}
catch (Throwable t) {
throw new Exception("The data preparation caused an error: " + t.getMessage(), t);
}
// open stub implementation
try {
FunctionUtils.openFunction(this.stub, getTaskConfig().getStubParameters());
stubOpen = true;
}
catch (Throwable t) {
throw new Exception("The user defined 'open()' method caused an exception: " + t.getMessage(), t);
}
if (!running) {
return;
}
// run the user code
driver.run();
// close. We close here such that a regular close throwing an exception marks a task as failed.
if (this.running) {
FunctionUtils.closeFunction(this.stub);
stubOpen = false;
}
this.output.close();
}
catch (Exception ex) {
// close the input, but do not report any exceptions, since we already have another root cause
if (stubOpen) {
try {
FunctionUtils.closeFunction(this.stub);
}
catch (Throwable ignored) {}
}
// if resettable driver invoke tear down
if (this.driver instanceof ResettableDriver) {
final ResettableDriver<?, ?> resDriver = (ResettableDriver<?, ?>) this.driver;
try {
resDriver.teardown();
} catch (Throwable t) {
throw new Exception("Error while shutting down an iterative operator: " + t.getMessage(), t);
}
}
// drop exception, if the task was canceled
if (this.running) {
throw ex;
}
}
finally {
driver.cleanup();
}
}

/** Runs a resettable driver for the given number of iterations (initialize once, then reset). */
@SuppressWarnings({"unchecked","rawtypes"})
public void testResettableDriver(ResettableDriver driver, Class stubClass, int iterations) throws Exception {
driver.setup(this);
for(int i = 0; i < iterations; i++) {
if(i == 0) {
driver.initialize();
}
else {
driver.reset();
}
testDriver(driver, stubClass);
}
driver.teardown();
}

public void cancel() throws Exception {
this.running = false;
// compensate for races, where cancel is called before the driver is set
// not that this is an artifact of a bad design of this test base, where the setup
// of the basic properties is not separated from the invocation of the execution logic
while (this.driver == null) {
Thread.sleep(200);
}
this.driver.cancel();
}

// --------------------------------------------------------------------------------------------

@Override
public TaskConfig getTaskConfig() {
return this.taskConfig;
}

@Override
public TaskManagerRuntimeInfo getTaskManagerInfo() {
return this.taskManageInfo;
}

@Override
public ExecutionConfig getExecutionConfig() {
return executionConfig;
}

@Override
public ClassLoader getUserCodeClassLoader() {
return getClass().getClassLoader();
}

@Override
public IOManager getIOManager() {
return this.ioManager;
}

@Override
public MemoryManager getMemoryManager() {
return this.memManager;
}

/**
 * Returns the input iterator at {@code index}. For sorted inputs the
 * iterator is fetched lazily from the sorter (blocking until the sort
 * finishes) and cached back into the inputs list.
 */
@Override
public <X> MutableObjectIterator<X> getInput(int index) {
MutableObjectIterator<Record> in = this.inputs.get(index);
if (in == null) {
// waiting from sorter
try {
in = this.sorters.get(index).getIterator();
} catch (InterruptedException e) {
// FIX: restore the interrupt flag and keep the cause instead of
// swallowing both (Java interruption best practice).
Thread.currentThread().interrupt();
throw new RuntimeException("Interrupted while waiting for the sorter's iterator.", e);
}
this.inputs.set(index, in);
}
@SuppressWarnings("unchecked")
MutableObjectIterator<X> input = (MutableObjectIterator<X>) in;
return input;
}

@Override
public <X> TypeSerializerFactory<X> getInputSerializer(int index) {
@SuppressWarnings("unchecked")
TypeSerializerFactory<X> factory = (TypeSerializerFactory<X>) RecordSerializerFactory.get();
return factory;
}

@Override
public <X> TypeComparator<X> getDriverComparator(int index) {
@SuppressWarnings("unchecked")
TypeComparator<X> comparator = (TypeComparator<X>) this.comparators.get(index);
return comparator;
}

@Override
public S getStub() {
return this.stub;
}

@Override
public Collector<Record> getOutputCollector() {
return this.output;
}

@Override
public AbstractInvokable getContainingTask() {
return this.owner;
}

@Override
public String formatLogString(String message) {
return "Driver Tester: " + message;
}

@Override
public OperatorMetricGroup getMetricGroup() {
return new UnregisteredTaskMetricsGroup.DummyOperatorMetricGroup();
}

// --------------------------------------------------------------------------------------------

/** Verifies and releases all resources after each test: sorters, I/O manager, memory manager. */
@After
public void shutdownAll() throws Exception {
// 1st, shutdown sorters
for (UnilateralSortMerger<?> sorter : this.sorters) {
if (sorter != null) {
sorter.close();
}
}
this.sorters.clear();
// 2nd, shutdown I/O
this.ioManager.shutdown();
Assert.assertTrue("I/O Manager has not properly shut down.", this.ioManager.isProperlyShutDown());
// last, verify all memory is returned and shutdown mem manager
MemoryManager memMan = getMemoryManager();
if (memMan != null) {
Assert.assertTrue("Memory Manager managed memory was not completely freed.", memMan.verifyEmpty());
memMan.shutdown();
}
}

// --------------------------------------------------------------------------------------------

/** Collector that copies each record into a backing list (copies because records are reused). */
private static final class ListOutputCollector implements Collector<Record> {

private final List<Record> output;

public ListOutputCollector(List<Record> outputList) {
this.output = outputList;
}

@Override
public void collect(Record record) {
// createCopy() guards against object reuse mutating already-collected records.
this.output.add(record.createCopy());
}

@Override
public void close() {}
}

/** Collector that only counts emitted records. */
public static final class CountingOutputCollector implements Collector<Record> {

private int num;

@Override
public void collect(Record record) {
this.num++;
}

@Override
public void close() {}

public int getNumberOfRecords() {
return this.num;
}
}
}
| |
/**********************************************************************************
* $URL: https://source.sakaiproject.org/svn/kernel/tags/sakai-10.1/kernel-impl/src/main/java/org/sakaiproject/tool/impl/ToolImpl.java $
* $Id: ToolImpl.java 107506 2012-04-24 13:58:10Z matthew.buckett@oucs.ox.ac.uk $
***********************************************************************************
*
* Copyright (c) 2005, 2006, 2007, 2008 Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.sakaiproject.tool.impl;
import java.util.Collections;
import java.util.HashSet;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.sakaiproject.tool.api.ActiveToolManager;
import org.sakaiproject.tool.api.Tool;
import org.sakaiproject.tool.api.ToolManager;
import org.sakaiproject.util.ResourceLoader;
/**
* <p>
* Tool is a utility class that implements the Tool interface.
* </p>
*/
/**
 * <p>
 * Tool is a utility class that implements the Tool interface.
 * </p>
 */
public class ToolImpl implements Tool, Comparable
{
	/** Our log (commons). */
	private static Log M_log = LogFactory.getLog(ToolImpl.class);

	/** The access security. */
	protected Tool.AccessSecurity m_accessSecurity = Tool.AccessSecurity.PORTAL;

	/** The tool Manager that possesses the ResourceBundle. */
	private ToolManager m_toolManager;

	/** The set of categories. NOTE(review): raw Set retained to match the Tool interface. */
	protected Set m_categories = new HashSet();

	/** The description string. */
	protected String m_description = null;

	/** The configuration properties that are set by registration and may not be changed by configuration. */
	protected Properties m_finalConfig = new Properties();

	/** Home destination. */
	protected String m_home = null;

	/** The well known identifier string. */
	protected String m_id = null;

	/** The set of keywords. */
	protected Set m_keywords = new HashSet();

	/** The configuration properties that may be changed by configuration. */
	protected Properties m_mutableConfig = new Properties();

	/** The title string. */
	protected String m_title = null;

	/** Localization data. */
	public ResourceLoader m_title_local = null;

	/** Per-locale title bundles, keyed by locale string; may hold a "DEFAULT" entry. */
	public Map m_title_bundle = null;

	/**
	 * Construct.
	 *
	 * @param activeToolManager
	 *        The manager used to resolve localized tool properties (title, description).
	 */
	public ToolImpl(ToolManager activeToolManager)
	{
		m_toolManager = activeToolManager;
	}

	/**
	 * {@inheritDoc}
	 */
	public int compareTo(Object obj)
	{
		// let it throw a class cast exception if the obj is not some sort of Tool
		org.sakaiproject.tool.api.Tool tool = (org.sakaiproject.tool.api.Tool) obj;

		// do an id based comparison
		return getId().compareTo(tool.getId());
	}

	/**
	 * {@inheritDoc}
	 */
	public boolean equals(Object obj)
	{
		if (!(obj instanceof ToolImpl))
		{
			return false;
		}

		// identity is the tool id (NOTE(review): assumes both ids are non-null — TODO confirm)
		return ((ToolImpl) obj).getId().equals(getId());
	}

	/**
	 * {@inheritDoc}
	 */
	public Tool.AccessSecurity getAccessSecurity()
	{
		return m_accessSecurity;
	}

	/**
	 * {@inheritDoc}
	 */
	public Set getCategories()
	{
		// read-only view; backing set is still shared with the caller of setCategories()
		return Collections.unmodifiableSet(m_categories);
	}

	/**
	 * {@inheritDoc}
	 */
	public String getDescription()
	{
		// prefer the centrally localized description, falling back to the registered one
		final String localizedToolDescription = m_toolManager.getLocalizedToolProperty(this.getId(), "description");
		if (localizedToolDescription == null)
		{
			return m_description;
		}
		else
		{
			return localizedToolDescription;
		}
	}

	/**
	 * {@inheritDoc}
	 */
	public Properties getFinalConfig()
	{
		// return a copy so that it is read only
		Properties rv = new Properties();
		rv.putAll(m_finalConfig);
		return rv;
	}

	/**
	 * {@inheritDoc}
	 */
	public String getHome()
	{
		return m_home;
	}

	/**
	 * {@inheritDoc}
	 */
	public String getId()
	{
		return m_id;
	}

	/**
	 * {@inheritDoc}
	 */
	public Set getKeywords()
	{
		return Collections.unmodifiableSet(m_keywords);
	}

	/**
	 * {@inheritDoc}
	 */
	public Properties getMutableConfig()
	{
		// return a copy so that it is read only
		Properties rv = new Properties();
		rv.putAll(m_mutableConfig);
		return rv;
	}

	/**
	 * {@inheritDoc}
	 */
	public Properties getRegisteredConfig()
	{
		// combine the final and mutable, and return a copy so that it is read only;
		// mutable entries win on key collisions because they are put second
		Properties rv = new Properties();
		rv.putAll(m_finalConfig);
		rv.putAll(m_mutableConfig);
		return rv;
	}

	/**
	 * {@inheritDoc}
	 *
	 * Modified to fix SAK-8908 by Mark Norton.
	 * This implementation of getTitle() uses a three tier lookup strategy:
	 * <OL>
	 * <LI>If the title is present in a central tool bundle, use it.</LI>
	 * <LI>If there is a tool title resource bundle in the tool package, use it.</LI>
	 * <LI>Otherwise default to the title registered in the tool registration file.</LI>
	 * </OL>
	 */
	public String getTitle()
	{
		// tier 1: central tool bundle
		final String centralToolTitle = m_toolManager.getLocalizedToolProperty(this.getId(), "title");
		if (centralToolTitle != null)
			return centralToolTitle;

		String localizedToolTitle = null;

		// tier 2: per-tool title bundle
		// Titles have extra logic that isn't present for descriptions (WHY WHY WHY)
		if (m_title_bundle != null)
		{
			// Get the user's current locale preference.
			ResourceLoader rl = new ResourceLoader();
			String loc = rl.getLocale().toString();

			// Attempt to get the properties corresponding to that locale.
			Properties props = (Properties) m_title_bundle.get(loc);

			// If a localized set doesn't exist, try for a default set.
			if (props == null)
				props = (Properties) m_title_bundle.get("DEFAULT");

			// Get the localized tool title.
			if (props != null)
				localizedToolTitle = (String) props.get("title");
		}

		if (localizedToolTitle != null)
			return localizedToolTitle;

		// tier 3: use the default tool title from the tool definition file.
		return m_title;
	}

	/**
	 * {@inheritDoc}
	 */
	public int hashCode()
	{
		return getId().hashCode();
	}

	/**
	 * Set the access security.
	 *
	 * @param access
	 *        The new access security setting.
	 */
	public void setAccessSecurity(Tool.AccessSecurity access)
	{
		m_accessSecurity = access;
	}

	/**
	 * Set the categories.
	 *
	 * @param categories
	 *        The new categories set (Strings).
	 */
	public void setCategories(Set categories)
	{
		m_categories = categories;
	}

	/**
	 * Set the description.
	 *
	 * @param description
	 *        The description to set.
	 */
	public void setDescription(String description)
	{
		m_description = description;
	}

	/**
	 * Set the home destination.
	 *
	 * @param home
	 *        The home destination to set.
	 */
	public void setHome(String home)
	{
		m_home = home;
	}

	/**
	 * Set the id.
	 *
	 * @param id
	 *        The id to set.
	 */
	public void setId(String id)
	{
		m_id = id;
	}

	/**
	 * Set the keywords.
	 *
	 * @param keywords
	 *        The new keywords set (Strings).
	 */
	public void setKeywords(Set keywords)
	{
		m_keywords = keywords;
	}

	/**
	 * Set the registered configuration.
	 *
	 * @param finalConfig
	 *        The new registration-time (immutable) configuration Properties; null means empty.
	 * @param mutableConfig
	 *        The new configurable (mutable) configuration Properties; null means empty.
	 */
	public void setRegisteredConfig(Properties finalConfig, Properties mutableConfig)
	{
		m_finalConfig = finalConfig;
		if (m_finalConfig == null)
		{
			m_finalConfig = new Properties();
		}

		m_mutableConfig = mutableConfig;
		if (m_mutableConfig == null)
		{
			m_mutableConfig = new Properties();
		}
	}

	/**
	 * Set the title.
	 *
	 * @param title
	 *        The title to set.
	 */
	public void setTitle(String title)
	{
		m_title = title;
	}
}
| |
/*
* The MIT License (MIT)
*
* Copyright (c) 2014-2016 Christian Schudt
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package rocks.xmpp.core.sasl.model;
import rocks.xmpp.core.stream.model.StreamElement;
import javax.xml.XMLConstants;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElements;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlTransient;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.XmlValue;
import java.util.Locale;
import java.util.Objects;
/**
* The implementation of the {@code <failure/>} element, which indicates a SASL failure.
* <blockquote>
* <p><cite><a href="http://xmpp.org/rfcs/rfc6120.html#sasl-process-neg-failure">6.4.5. SASL Failure</a></cite></p>
* <p>The receiving entity reports failure of the handshake for this authentication mechanism by sending a {@code <failure/>} element qualified by the 'urn:ietf:params:xml:ns:xmpp-sasl' namespace.</p>
* </blockquote>
* <p>
* This class is immutable.
*
* @author Christian Schudt
*/
@XmlRootElement
public final class Failure implements StreamElement {

    @XmlElements({@XmlElement(name = "aborted", type = Aborted.class),
            @XmlElement(name = "account-disabled", type = AccountDisabled.class),
            @XmlElement(name = "credentials-expired", type = CredentialsExpired.class),
            @XmlElement(name = "encryption-required", type = EncryptionRequired.class),
            @XmlElement(name = "incorrect-encoding", type = IncorrectEncoding.class),
            @XmlElement(name = "invalid-authzid", type = InvalidAuthzid.class),
            @XmlElement(name = "invalid-mechanism", type = InvalidMechanism.class),
            @XmlElement(name = "malformed-request", type = MalformedRequest.class),
            @XmlElement(name = "mechanism-too-weak", type = MechanismTooWeak.class),
            @XmlElement(name = "not-authorized", type = NotAuthorized.class),
            @XmlElement(name = "temporary-auth-failure", type = TemporaryAuthFailure.class)
    })
    private final Condition condition;

    private final Text text;

    /**
     * Private default constructor, needed for unmarshalling.
     */
    private Failure() {
        this.condition = null;
        this.text = null;
    }

    /**
     * Creates a failure with a defined condition and no descriptive text.
     *
     * @param condition The error condition; must not be null.
     */
    public Failure(Condition condition) {
        this.condition = Objects.requireNonNull(condition);
        this.text = null;
    }

    /**
     * Creates a failure with a defined condition and optional descriptive text.
     *
     * @param condition The error condition; must not be null.
     * @param text      The descriptive text; may be null.
     * @param locale    The language of the text; may be null.
     */
    public Failure(Condition condition, String text, Locale locale) {
        this.condition = Objects.requireNonNull(condition);
        if (text != null) {
            this.text = new Text(text, locale);
        } else {
            this.text = null;
        }
    }

    /**
     * Gets the defined error condition.
     *
     * @return The error condition.
     */
    public final Condition getCondition() {
        return condition;
    }

    /**
     * Gets the text of the failure.
     *
     * @return The text.
     */
    public final String getText() {
        if (text != null) {
            return text.text;
        }
        return null;
    }

    /**
     * Gets the language of the text.
     *
     * @return The language.
     */
    public final Locale getLanguage() {
        if (text != null) {
            return text.lang;
        }
        return null;
    }

    @Override
    public final String toString() {
        String text = getText();
        return "SASL failure:" + (condition != null ? " " + condition.toString() : "") + (text != null ? " (" + text + ')' : "");
    }

    /**
     * The implementation of the {@code <aborted/>} SASL failure.
     * <blockquote>
     * <p><cite><a href="http://xmpp.org/rfcs/rfc6120.html#sasl-errors-aborted">6.5.1. aborted</a></cite></p>
     * <p>The receiving entity acknowledges that the authentication handshake has been aborted by the initiating entity; sent in reply to the {@code <abort/>} element.</p>
     * </blockquote>
     */
    @XmlType(factoryMethod = "create")
    static final class Aborted extends Condition {
        private Aborted() {
        }

        private static Aborted create() {
            return (Aborted) ABORTED;
        }
    }

    /**
     * The implementation of the {@code <account-disabled/>} SASL failure.
     * <blockquote>
     * <p><cite><a href="http://xmpp.org/rfcs/rfc6120.html#sasl-errors-account-disabled">6.5.2. account-disabled</a></cite></p>
     * <p>The account of the initiating entity has been temporarily disabled; sent in reply to an {@code <auth/>} element (with or without initial response data) or a {@code <response/>} element.</p>
     * </blockquote>
     */
    @XmlType(factoryMethod = "create")
    static final class AccountDisabled extends Condition {
        private AccountDisabled() {
        }

        private static AccountDisabled create() {
            return (AccountDisabled) ACCOUNT_DISABLED;
        }
    }

    /**
     * The implementation of the {@code <credentials-expired/>} SASL failure.
     * <blockquote>
     * <p><cite><a href="http://xmpp.org/rfcs/rfc6120.html#sasl-errors-credentials-expired">6.5.3. credentials-expired</a></cite></p>
     * <p>The authentication failed because the initiating entity provided credentials that have expired; sent in reply to a {@code <response/>} element or an {@code <auth/>} element with initial response data.</p>
     * </blockquote>
     */
    @XmlType(factoryMethod = "create")
    static final class CredentialsExpired extends Condition {
        private CredentialsExpired() {
        }

        private static CredentialsExpired create() {
            return (CredentialsExpired) CREDENTIALS_EXPIRED;
        }
    }

    /**
     * The implementation of the {@code <encryption-required/>} SASL failure.
     * <blockquote>
     * <p><cite><a href="http://xmpp.org/rfcs/rfc6120.html#sasl-errors-encryption-required">6.5.4. encryption-required</a></cite></p>
     * <p>The mechanism requested by the initiating entity cannot be used unless the confidentiality and integrity of the underlying stream are protected (typically via TLS); sent in reply to an {@code <auth/>} element (with or without initial response data).</p>
     * </blockquote>
     */
    @XmlType(factoryMethod = "create")
    static final class EncryptionRequired extends Condition {
        private EncryptionRequired() {
        }

        private static EncryptionRequired create() {
            return (EncryptionRequired) ENCRYPTION_REQUIRED;
        }
    }

    /**
     * The implementation of the {@code <incorrect-encoding/>} SASL failure.
     * <blockquote>
     * <p><cite><a href="http://xmpp.org/rfcs/rfc6120.html#sasl-errors-incorrect-encoding">6.5.5. incorrect-encoding</a></cite></p>
     * <p>The data provided by the initiating entity could not be processed because the base 64 encoding is incorrect (e.g., because the encoding does not adhere to the definition in Section 4 of [BASE64]); sent in reply to a {@code <response/>} element or an {@code <auth/>} element with initial response data.</p>
     * </blockquote>
     */
    @XmlType(factoryMethod = "create")
    static final class IncorrectEncoding extends Condition {
        private IncorrectEncoding() {
        }

        private static IncorrectEncoding create() {
            return (IncorrectEncoding) INCORRECT_ENCODING;
        }
    }

    /**
     * The implementation of the {@code <invalid-authzid/>} SASL failure.
     * <blockquote>
     * <p><cite><a href="http://xmpp.org/rfcs/rfc6120.html#sasl-errors-invalid-authzid">6.5.6. invalid-authzid</a></cite></p>
     * <p>The authzid provided by the initiating entity is invalid, either because it is incorrectly formatted or because the initiating entity does not have permissions to authorize that ID; sent in reply to a {@code <response/>} element or an {@code <auth/>} element with initial response data.</p>
     * </blockquote>
     */
    @XmlType(factoryMethod = "create")
    static final class InvalidAuthzid extends Condition {
        private InvalidAuthzid() {
        }

        private static InvalidAuthzid create() {
            return (InvalidAuthzid) INVALID_AUTHZID;
        }
    }

    /**
     * The implementation of the {@code <invalid-mechanism/>} SASL failure.
     * <blockquote>
     * <p><cite><a href="http://xmpp.org/rfcs/rfc6120.html#sasl-errors-invalid-mechanism">6.5.7. invalid-mechanism</a></cite></p>
     * <p>The initiating entity did not specify a mechanism, or requested a mechanism that is not supported by the receiving entity; sent in reply to an {@code <auth/>} element.</p>
     * </blockquote>
     */
    @XmlType(factoryMethod = "create")
    static final class InvalidMechanism extends Condition {
        private InvalidMechanism() {
        }

        private static InvalidMechanism create() {
            return (InvalidMechanism) INVALID_MECHANISM;
        }
    }

    /**
     * The implementation of the {@code <malformed-request/>} SASL failure.
     * <blockquote>
     * <p><cite><a href="http://xmpp.org/rfcs/rfc6120.html#sasl-errors-malformed-request">6.5.8. malformed-request</a></cite></p>
     * <p>The request is malformed (e.g., the {@code <auth/>} element includes initial response data but the mechanism does not allow that, or the data sent violates the syntax for the specified SASL mechanism); sent in reply to an {@code <abort/>}, {@code <auth/>}, {@code <challenge/>}, or {@code <response/>} element.</p>
     * </blockquote>
     */
    // made final for consistency with the other condition implementations
    @XmlType(factoryMethod = "create")
    static final class MalformedRequest extends Condition {
        private MalformedRequest() {
        }

        private static MalformedRequest create() {
            return (MalformedRequest) MALFORMED_REQUEST;
        }
    }

    /**
     * The implementation of the {@code <mechanism-too-weak/>} SASL failure.
     * <blockquote>
     * <p><cite><a href="http://xmpp.org/rfcs/rfc6120.html#sasl-errors-mechanism-too-weak">6.5.9. mechanism-too-weak</a></cite></p>
     * <p>The mechanism requested by the initiating entity is weaker than server policy permits for that initiating entity; sent in reply to an {@code <auth/>} element (with or without initial response data).</p>
     * </blockquote>
     */
    @XmlType(factoryMethod = "create")
    static final class MechanismTooWeak extends Condition {
        private MechanismTooWeak() {
        }

        private static MechanismTooWeak create() {
            return (MechanismTooWeak) MECHANISM_TOO_WEAK;
        }
    }

    /**
     * The implementation of the {@code <not-authorized/>} SASL failure.
     * <blockquote>
     * <p><cite><a href="http://xmpp.org/rfcs/rfc6120.html#sasl-errors-not-authorized">6.5.10. not-authorized</a></cite></p>
     * <p>The authentication failed because the initiating entity did not provide proper credentials, or because some generic authentication failure has occurred but the receiving entity does not wish to disclose specific information about the cause of the failure; sent in reply to a {@code <response/>} element or an {@code <auth/>} element with initial response data.</p>
     * </blockquote>
     */
    @XmlType(factoryMethod = "create")
    static final class NotAuthorized extends Condition {
        private NotAuthorized() {
        }

        private static NotAuthorized create() {
            return (NotAuthorized) NOT_AUTHORIZED;
        }
    }

    /**
     * The implementation of the {@code <temporary-auth-failure/>} SASL failure.
     * <blockquote>
     * <p><cite><a href="http://xmpp.org/rfcs/rfc6120.html#sasl-errors-temporary-auth-failure">6.5.11. temporary-auth-failure</a></cite></p>
     * <p>The authentication failed because of a temporary error condition within the receiving entity, and it is advisable for the initiating entity to try again later; sent in reply to an {@code <auth/>} element or a {@code <response/>} element.</p>
     * </blockquote>
     */
    @XmlType(factoryMethod = "create")
    static final class TemporaryAuthFailure extends Condition {
        private TemporaryAuthFailure() {
        }

        private static TemporaryAuthFailure create() {
            return (TemporaryAuthFailure) TEMPORARY_AUTH_FAILURE;
        }
    }

    /**
     * A general class for a SASL failure condition.
     */
    @XmlTransient
    public abstract static class Condition {

        /**
         * The implementation of the {@code <aborted/>} SASL failure.
         * <blockquote>
         * <p><cite><a href="http://xmpp.org/rfcs/rfc6120.html#sasl-errors-aborted">6.5.1. aborted</a></cite></p>
         * <p>The receiving entity acknowledges that the authentication handshake has been aborted by the initiating entity; sent in reply to the {@code <abort/>} element.</p>
         * </blockquote>
         */
        public static final Condition ABORTED = new Aborted();

        /**
         * The implementation of the {@code <account-disabled/>} SASL failure.
         * <blockquote>
         * <p><cite><a href="http://xmpp.org/rfcs/rfc6120.html#sasl-errors-account-disabled">6.5.2. account-disabled</a></cite></p>
         * <p>The account of the initiating entity has been temporarily disabled; sent in reply to an {@code <auth/>} element (with or without initial response data) or a {@code <response/>} element.</p>
         * </blockquote>
         */
        public static final Condition ACCOUNT_DISABLED = new AccountDisabled();

        /**
         * The implementation of the {@code <credentials-expired/>} SASL failure.
         * <blockquote>
         * <p><cite><a href="http://xmpp.org/rfcs/rfc6120.html#sasl-errors-credentials-expired">6.5.3. credentials-expired</a></cite></p>
         * <p>The authentication failed because the initiating entity provided credentials that have expired; sent in reply to a {@code <response/>} element or an {@code <auth/>} element with initial response data.</p>
         * </blockquote>
         */
        public static final Condition CREDENTIALS_EXPIRED = new CredentialsExpired();

        /**
         * The implementation of the {@code <encryption-required/>} SASL failure.
         * <blockquote>
         * <p><cite><a href="http://xmpp.org/rfcs/rfc6120.html#sasl-errors-encryption-required">6.5.4. encryption-required</a></cite></p>
         * <p>The mechanism requested by the initiating entity cannot be used unless the confidentiality and integrity of the underlying stream are protected (typically via TLS); sent in reply to an {@code <auth/>} element (with or without initial response data).</p>
         * </blockquote>
         */
        public static final Condition ENCRYPTION_REQUIRED = new EncryptionRequired();

        /**
         * The implementation of the {@code <incorrect-encoding/>} SASL failure.
         * <blockquote>
         * <p><cite><a href="http://xmpp.org/rfcs/rfc6120.html#sasl-errors-incorrect-encoding">6.5.5. incorrect-encoding</a></cite></p>
         * <p>The data provided by the initiating entity could not be processed because the base 64 encoding is incorrect (e.g., because the encoding does not adhere to the definition in Section 4 of [BASE64]); sent in reply to a {@code <response/>} element or an {@code <auth/>} element with initial response data.</p>
         * </blockquote>
         */
        public static final Condition INCORRECT_ENCODING = new IncorrectEncoding();

        /**
         * The implementation of the {@code <invalid-authzid/>} SASL failure.
         * <blockquote>
         * <p><cite><a href="http://xmpp.org/rfcs/rfc6120.html#sasl-errors-invalid-authzid">6.5.6. invalid-authzid</a></cite></p>
         * <p>The authzid provided by the initiating entity is invalid, either because it is incorrectly formatted or because the initiating entity does not have permissions to authorize that ID; sent in reply to a {@code <response/>} element or an {@code <auth/>} element with initial response data.</p>
         * </blockquote>
         */
        public static final Condition INVALID_AUTHZID = new InvalidAuthzid();

        /**
         * The implementation of the {@code <invalid-mechanism/>} SASL failure.
         * <blockquote>
         * <p><cite><a href="http://xmpp.org/rfcs/rfc6120.html#sasl-errors-invalid-mechanism">6.5.7. invalid-mechanism</a></cite></p>
         * <p>The initiating entity did not specify a mechanism, or requested a mechanism that is not supported by the receiving entity; sent in reply to an {@code <auth/>} element.</p>
         * </blockquote>
         */
        public static final Condition INVALID_MECHANISM = new InvalidMechanism();

        /**
         * The implementation of the {@code <malformed-request/>} SASL failure.
         * <blockquote>
         * <p><cite><a href="http://xmpp.org/rfcs/rfc6120.html#sasl-errors-malformed-request">6.5.8. malformed-request</a></cite></p>
         * <p>The request is malformed (e.g., the {@code <auth/>} element includes initial response data but the mechanism does not allow that, or the data sent violates the syntax for the specified SASL mechanism); sent in reply to an {@code <abort/>}, {@code <auth/>}, {@code <challenge/>}, or {@code <response/>} element.</p>
         * </blockquote>
         */
        public static final Condition MALFORMED_REQUEST = new MalformedRequest();

        /**
         * The implementation of the {@code <mechanism-too-weak/>} SASL failure.
         * <blockquote>
         * <p><cite><a href="http://xmpp.org/rfcs/rfc6120.html#sasl-errors-mechanism-too-weak">6.5.9. mechanism-too-weak</a></cite></p>
         * <p>The mechanism requested by the initiating entity is weaker than server policy permits for that initiating entity; sent in reply to an {@code <auth/>} element (with or without initial response data).</p>
         * </blockquote>
         */
        public static final Condition MECHANISM_TOO_WEAK = new MechanismTooWeak();

        /**
         * The implementation of the {@code <not-authorized/>} SASL failure.
         * <blockquote>
         * <p><cite><a href="http://xmpp.org/rfcs/rfc6120.html#sasl-errors-not-authorized">6.5.10. not-authorized</a></cite></p>
         * <p>The authentication failed because the initiating entity did not provide proper credentials, or because some generic authentication failure has occurred but the receiving entity does not wish to disclose specific information about the cause of the failure; sent in reply to a {@code <response/>} element or an {@code <auth/>} element with initial response data.</p>
         * </blockquote>
         */
        public static final Condition NOT_AUTHORIZED = new NotAuthorized();

        /**
         * The implementation of the {@code <temporary-auth-failure/>} SASL failure.
         * <blockquote>
         * <p><cite><a href="http://xmpp.org/rfcs/rfc6120.html#sasl-errors-temporary-auth-failure">6.5.11. temporary-auth-failure</a></cite></p>
         * <p>The authentication failed because of a temporary error condition within the receiving entity, and it is advisable for the initiating entity to try again later; sent in reply to an {@code <auth/>} element or a {@code <response/>} element.</p>
         * </blockquote>
         */
        public static final Condition TEMPORARY_AUTH_FAILURE = new TemporaryAuthFailure();

        private Condition() {
        }

        @Override
        public final String toString() {
            // Derive the protocol token from the class name, e.g. NotAuthorized -> not-authorized.
            // Locale.ROOT avoids locale-sensitive lowercasing (e.g. Turkish dotless i) for this
            // wire-protocol identifier.
            return getClass().getSimpleName().replaceAll("([a-z])([A-Z]+)", "$1-$2").toLowerCase(Locale.ROOT);
        }
    }

    /**
     * The text element of the failure.
     */
    private static final class Text {

        @XmlValue
        private final String text;

        @XmlAttribute(namespace = XMLConstants.XML_NS_URI)
        private final Locale lang;

        private Text() {
            this.text = null;
            this.lang = null;
        }

        private Text(final String text, final Locale lang) {
            this.text = text;
            this.lang = lang;
        }
    }
}
| |
// Copyright (C) 2009 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.client.auth.userpass;
import com.google.gerrit.client.Gerrit;
import com.google.gerrit.client.SignInDialog;
import com.google.gerrit.client.rpc.GerritCallback;
import com.google.gerrit.client.ui.SmallHeading;
import com.google.gerrit.common.PageLinks;
import com.google.gerrit.common.auth.SignInMode;
import com.google.gerrit.common.auth.userpass.LoginResult;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.event.dom.client.KeyCodes;
import com.google.gwt.event.dom.client.KeyPressEvent;
import com.google.gwt.event.dom.client.KeyPressHandler;
import com.google.gwt.user.client.Command;
import com.google.gwt.user.client.DOM;
import com.google.gwt.user.client.DeferredCommand;
import com.google.gwt.user.client.Window.Location;
import com.google.gwt.user.client.ui.Button;
import com.google.gwt.user.client.ui.FlowPanel;
import com.google.gwt.user.client.ui.Grid;
import com.google.gwt.user.client.ui.InlineLabel;
import com.google.gwt.user.client.ui.PasswordTextBox;
import com.google.gwt.user.client.ui.TextBox;
import com.google.gwtexpui.globalkey.client.GlobalKey;
import com.google.gwtexpui.globalkey.client.NpTextBox;
/**
 * Sign-in dialog that collects a username and password and authenticates them
 * via {@code Util.SVC.authenticate}. On success the browser is redirected
 * through the server-side login servlet; on failure an inline error is shown
 * and the form is re-enabled.
 */
public class UserPassSignInDialog extends SignInDialog {
  static {
    UserPassResources.I.css().ensureInjected();
  }

  private final FlowPanel formBody;

  private FlowPanel errorLine;
  private InlineLabel errorMsg;

  private Button login;
  private Button close;
  private TextBox username;
  private TextBox password;

  /**
   * @param token the history token to return to after sign-in.
   * @param initialErrorMsg error text to display immediately, or null for none.
   */
  public UserPassSignInDialog(final String token, final String initialErrorMsg) {
    super(SignInMode.SIGN_IN, token);
    setAutoHideEnabled(false);

    formBody = new FlowPanel();
    formBody.setStyleName(UserPassResources.I.css().loginForm());
    add(formBody);

    createHeaderText();
    createErrorBox();
    createUsernameBox();
    if (initialErrorMsg != null) {
      showError(initialErrorMsg);
    }
  }

  @Override
  public void show() {
    super.show();
    // Focus can only be set after the dialog is attached, hence the deferral.
    DeferredCommand.addCommand(new Command() {
      @Override
      public void execute() {
        username.setFocus(true);
      }
    });
  }

  private void createHeaderText() {
    final FlowPanel headerText = new FlowPanel();
    final SmallHeading headerLabel = new SmallHeading();
    headerLabel.setText(Util.M.signInAt(Location.getHostName()));
    headerText.add(headerLabel);
    formBody.add(headerText);
  }

  private void createErrorBox() {
    // The error line is always in the layout; only its visibility toggles,
    // so showing an error does not reflow the form.
    errorLine = new FlowPanel();
    DOM.setStyleAttribute(errorLine.getElement(), "visibility", "hidden");
    errorLine.setStyleName(UserPassResources.I.css().error());

    errorMsg = new InlineLabel();
    errorLine.add(errorMsg);
    formBody.add(errorLine);
  }

  private void showError(final String msgText) {
    errorMsg.setText(msgText);
    DOM.setStyleAttribute(errorLine.getElement(), "visibility", "");
  }

  private void hideError() {
    DOM.setStyleAttribute(errorLine.getElement(), "visibility", "hidden");
  }

  private void createUsernameBox() {
    username = new NpTextBox();
    username.setVisibleLength(25);
    // ENTER in the username field advances to the password field.
    username.addKeyPressHandler(new KeyPressHandler() {
      @Override
      public void onKeyPress(final KeyPressEvent event) {
        if (event.getCharCode() == KeyCodes.KEY_ENTER) {
          event.preventDefault();
          password.selectAll();
          password.setFocus(true);
        }
      }
    });

    password = new PasswordTextBox();
    password.setVisibleLength(25);
    password.addKeyPressHandler(GlobalKey.STOP_PROPAGATION);
    // ENTER in the password field submits the form.
    password.addKeyPressHandler(new KeyPressHandler() {
      @Override
      public void onKeyPress(final KeyPressEvent event) {
        if (event.getCharCode() == KeyCodes.KEY_ENTER) {
          event.preventDefault();
          onLogin();
        }
      }
    });

    final FlowPanel buttons = new FlowPanel();
    buttons.setStyleName(Gerrit.RESOURCES.css().errorDialogButtons());

    login = new Button();
    login.setText(Util.C.buttonSignIn());
    login.addClickHandler(new ClickHandler() {
      @Override
      public void onClick(final ClickEvent event) {
        onLogin();
      }
    });
    buttons.add(login);

    close = new Button();
    DOM.setStyleAttribute(close.getElement(), "marginLeft", "45px");
    close.setText(Gerrit.C.signInDialogClose());
    close.addClickHandler(new ClickHandler() {
      @Override
      public void onClick(ClickEvent event) {
        hide();
      }
    });
    buttons.add(close);

    final Grid formGrid = new Grid(3, 2);
    formGrid.setText(0, 0, Util.C.username());
    formGrid.setText(1, 0, Util.C.password());
    formGrid.setWidget(0, 1, username);
    formGrid.setWidget(1, 1, password);
    formGrid.setWidget(2, 1, buttons);
    formBody.add(formGrid);

    username.setTabIndex(1);
    password.setTabIndex(2);
    login.setTabIndex(3);
    close.setTabIndex(4);
  }

  /** Enables or disables the form controls while a sign-in RPC is in flight. */
  private void enable(final boolean on) {
    username.setEnabled(on);
    password.setEnabled(on);
    login.setEnabled(on);
  }

  private void onLogin() {
    hideError();

    final String user = username.getText();
    if (user == null || user.equals("")) {
      showError(Util.C.usernameRequired());
      username.setFocus(true);
      return;
    }

    final String pass = password.getText();
    if (pass == null || pass.equals("")) {
      showError(Util.C.passwordRequired());
      password.setFocus(true);
      return;
    }

    enable(false);
    Util.SVC.authenticate(user, pass, new GerritCallback<LoginResult>() {
      @Override
      public void onSuccess(final LoginResult result) {
        if (result.success) {
          String to = token;
          if (result.isNew && !to.startsWith(PageLinks.REGISTER + ",")) {
            to = PageLinks.REGISTER + "," + to;
          }

          // Unfortunately we no longer support updating the web UI when the
          // user signs in. Instead we must force a reload of the page, but
          // that isn't easy because we might need to change the anchor. So
          // we bounce through a little redirection servlet on the server.
          //
          Location.replace(Location.getPath() + "login/" + to);
        } else {
          showError(Util.C.invalidLogin());
          enable(true);
          password.selectAll();
          DeferredCommand.addCommand(new Command() {
            @Override
            public void execute() {
              password.setFocus(true);
            }
          });
        }
      }

      @Override
      public void onFailure(final Throwable caught) {
        super.onFailure(caught);
        enable(true);
      }
    });
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.lib.service.hadoop;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.lib.server.BaseService;
import org.apache.hadoop.lib.server.ServiceException;
import org.apache.hadoop.lib.service.FileSystemAccess;
import org.apache.hadoop.lib.service.FileSystemAccessException;
import org.apache.hadoop.lib.service.Instrumentation;
import org.apache.hadoop.lib.service.Scheduler;
import org.apache.hadoop.lib.util.Check;
import org.apache.hadoop.lib.util.ConfigurationUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.VersionInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.security.PrivilegedExceptionAction;
import java.util.Collection;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
@InterfaceAudience.Private
public class FileSystemAccessService extends BaseService implements FileSystemAccess {
    private static final Logger LOG = LoggerFactory.getLogger(FileSystemAccessService.class);

    /** Configuration prefix under which all settings of this service live. */
    public static final String PREFIX = "hadoop";

    private static final String INSTRUMENTATION_GROUP = "hadoop";

    public static final String AUTHENTICATION_TYPE = "authentication.type";
    public static final String KERBEROS_KEYTAB = "authentication.kerberos.keytab";
    public static final String KERBEROS_PRINCIPAL = "authentication.kerberos.principal";

    /** How often (seconds) the idle-filesystem purger runs. */
    public static final String FS_CACHE_PURGE_FREQUENCY = "filesystem.cache.purge.frequency";
    /** How long (seconds) a cached filesystem may sit unused before being closed. */
    public static final String FS_CACHE_PURGE_TIMEOUT = "filesystem.cache.purge.timeout";

    public static final String NAME_NODE_WHITELIST = "name.node.whitelist";

    public static final String HADOOP_CONF_DIR = "config.dir";

    private static final String[] HADOOP_CONF_FILES = {"core-site.xml", "hdfs-site.xml"};

    /** Marker injected by getFileSystemConfiguration() proving the conf came from this service. */
    private static final String FILE_SYSTEM_SERVICE_CREATED = "FileSystemAccessService.created";

    /**
     * Per-user cache entry wrapping a lazily-opened {@link FileSystem} plus a lease count.
     * Entries are never removed from the cache; only the wrapped filesystem is
     * opened/closed based on utilization (see comment on {@link #purgeIfIdle()}).
     */
    private static class CachedFileSystem {
        private FileSystem fs;
        private long lastUse;   // -1 while leased (or closed); wall-clock millis of last release otherwise
        private long timeout;   // idle timeout in millis; 0 means close eagerly when the lease count hits 0
        private int count;      // number of outstanding leases

        public CachedFileSystem(long timeout) {
            this.timeout = timeout;
            lastUse = -1;
            count = 0;
        }

        /**
         * Lazily opens (or reuses) the filesystem and takes a lease on it.
         * Renamed from the original misspelling {@code getFileSytem}; the class is
         * private, so no external callers are affected.
         */
        synchronized FileSystem getFileSystem(Configuration conf)
            throws IOException {
            if (fs == null) {
                fs = FileSystem.get(conf);
            }
            lastUse = -1;
            count++;
            return fs;
        }

        /**
         * Releases one lease. When the last lease is released, the filesystem is
         * closed immediately if no idle timeout is configured, otherwise it is
         * left open and timestamped for the purger.
         */
        synchronized void release() throws IOException {
            count--;
            if (count == 0) {
                if (timeout == 0) {
                    fs.close();
                    fs = null;
                    lastUse = -1;
                } else {
                    lastUse = System.currentTimeMillis();
                }
            }
        }

        // To avoid race conditions in the map cache adding/removing entries,
        // an entry in the cache remains forever; it just closes/opens filesystems
        // based on their utilization. Worst case scenario, the penalty we'll
        // pay is that the amount of entries in the cache will be the total
        // number of users in HDFS (which seems a reasonable overhead).
        synchronized boolean purgeIfIdle() throws IOException {
            boolean ret = false;
            if (count == 0 && lastUse != -1 &&
                (System.currentTimeMillis() - lastUse) > timeout) {
                fs.close();
                fs = null;
                lastUse = -1;
                ret = true;
            }
            return ret;
        }
    }

    public FileSystemAccessService() {
        super(PREFIX);
    }

    private Collection<String> nameNodeWhitelist;

    Configuration serviceHadoopConf;

    // Counts filesystems handed out via createFileSystem(String, Configuration)
    // that the caller must release; exposed through instrumentation.
    private AtomicInteger unmanagedFileSystems = new AtomicInteger();

    private ConcurrentHashMap<String, CachedFileSystem> fsCache =
        new ConcurrentHashMap<String, CachedFileSystem>();

    private long purgeTimeout;

    /**
     * Configures authentication (kerberos or simple), loads the Hadoop
     * configuration files and reads the NameNode whitelist.
     *
     * @throws ServiceException if the security settings are incomplete, the
     *         Kerberos login fails, or the Hadoop configuration directory is
     *         missing or unreadable.
     */
    @Override
    protected void init() throws ServiceException {
        LOG.info("Using FileSystemAccess JARs version [{}]", VersionInfo.getVersion());
        String security = getServiceConfig().get(AUTHENTICATION_TYPE, "simple").trim();
        if (security.equals("kerberos")) {
            String defaultName = getServer().getName();
            String keytab = System.getProperty("user.home") + "/" + defaultName + ".keytab";
            keytab = getServiceConfig().get(KERBEROS_KEYTAB, keytab).trim();
            if (keytab.length() == 0) {
                throw new ServiceException(FileSystemAccessException.ERROR.H01, KERBEROS_KEYTAB);
            }
            String principal = defaultName + "/localhost@LOCALHOST";
            principal = getServiceConfig().get(KERBEROS_PRINCIPAL, principal).trim();
            if (principal.length() == 0) {
                throw new ServiceException(FileSystemAccessException.ERROR.H01, KERBEROS_PRINCIPAL);
            }
            Configuration conf = new Configuration();
            conf.set("hadoop.security.authentication", "kerberos");
            UserGroupInformation.setConfiguration(conf);
            try {
                UserGroupInformation.loginUserFromKeytab(principal, keytab);
            } catch (IOException ex) {
                throw new ServiceException(FileSystemAccessException.ERROR.H02, ex.getMessage(), ex);
            }
            LOG.info("Using FileSystemAccess Kerberos authentication, principal [{}] keytab [{}]", principal, keytab);
        } else if (security.equals("simple")) {
            Configuration conf = new Configuration();
            conf.set("hadoop.security.authentication", "simple");
            UserGroupInformation.setConfiguration(conf);
            LOG.info("Using FileSystemAccess simple/pseudo authentication, principal [{}]", System.getProperty("user.name"));
        } else {
            throw new ServiceException(FileSystemAccessException.ERROR.H09, security);
        }
        String hadoopConfDirProp = getServiceConfig().get(HADOOP_CONF_DIR, getServer().getConfigDir());
        // File.getAbsoluteFile() never returns null, so the original null-check
        // (falling back to the server config dir) was dead code and is removed.
        File hadoopConfDir = new File(hadoopConfDirProp).getAbsoluteFile();
        if (!hadoopConfDir.exists()) {
            throw new ServiceException(FileSystemAccessException.ERROR.H10, hadoopConfDir);
        }
        try {
            serviceHadoopConf = loadHadoopConf(hadoopConfDir);
        } catch (IOException ex) {
            throw new ServiceException(FileSystemAccessException.ERROR.H11, ex.toString(), ex);
        }
        LOG.debug("FileSystemAccess FileSystem configuration:");
        // Configuration iterates as Map.Entry<String, String>; use the typed form
        // instead of the raw Map.Entry the original used.
        for (Map.Entry<String, String> entry : serviceHadoopConf) {
            LOG.debug(" {} = {}", entry.getKey(), entry.getValue());
        }
        setRequiredServiceHadoopConf(serviceHadoopConf);
        nameNodeWhitelist = toLowerCase(getServiceConfig().getTrimmedStringCollection(NAME_NODE_WHITELIST));
    }

    /** Loads core-site.xml and hdfs-site.xml (when present) from the given directory. */
    private Configuration loadHadoopConf(File dir) throws IOException {
        Configuration hadoopConf = new Configuration(false);
        for (String file : HADOOP_CONF_FILES) {
            File f = new File(dir, file);
            if (f.exists()) {
                hadoopConf.addResource(new Path(f.getAbsolutePath()));
            }
        }
        return hadoopConf;
    }

    /**
     * Registers instrumentation for unmanaged filesystems and, when an idle
     * timeout is configured, schedules the periodic cache purger.
     */
    @Override
    public void postInit() throws ServiceException {
        super.postInit();
        Instrumentation instrumentation = getServer().get(Instrumentation.class);
        instrumentation.addVariable(INSTRUMENTATION_GROUP, "unmanaged.fs", new Instrumentation.Variable<Integer>() {
            @Override
            public Integer getValue() {
                return unmanagedFileSystems.get();
            }
        });
        instrumentation.addSampler(INSTRUMENTATION_GROUP, "unmanaged.fs", 60, new Instrumentation.Variable<Long>() {
            @Override
            public Long getValue() {
                return (long) unmanagedFileSystems.get();
            }
        });
        Scheduler scheduler = getServer().get(Scheduler.class);
        int purgeInterval = getServiceConfig().getInt(FS_CACHE_PURGE_FREQUENCY, 60);
        purgeTimeout = getServiceConfig().getLong(FS_CACHE_PURGE_TIMEOUT, 60);
        // Negative timeouts are normalized to 0 (close-on-release, no purger).
        purgeTimeout = (purgeTimeout > 0) ? purgeTimeout : 0;
        if (purgeTimeout > 0) {
            scheduler.schedule(new FileSystemCachePurger(),
                               purgeInterval, purgeInterval, TimeUnit.SECONDS);
        }
    }

    /** Periodic task that closes filesystems idle for longer than the configured timeout. */
    private class FileSystemCachePurger implements Runnable {
        @Override
        public void run() {
            int count = 0;
            for (CachedFileSystem cacheFs : fsCache.values()) {
                try {
                    count += cacheFs.purgeIfIdle() ? 1 : 0;
                } catch (Throwable ex) {
                    LOG.warn("Error while purging filesystem, " + ex.toString(), ex);
                }
            }
            // FIX: the original format string was "Purged [{}} filesystem instances";
            // the mismatched "{}}" is not a valid SLF4J placeholder, so the purge
            // count was never substituted into the message.
            LOG.debug("Purged [{}] filesystem instances", count);
        }
    }

    /** Lower-cases every element of the collection (used for whitelist matching). */
    private Set<String> toLowerCase(Collection<String> collection) {
        Set<String> set = new HashSet<String>();
        for (String value : collection) {
            set.add(value.toLowerCase());
        }
        return set;
    }

    @Override
    public Class getInterface() {
        return FileSystemAccess.class;
    }

    @Override
    public Class[] getServiceDependencies() {
        return new Class[]{Instrumentation.class, Scheduler.class};
    }

    /** Creates a proxy-user UGI for {@code user} on top of the service's login user. */
    protected UserGroupInformation getUGI(String user) throws IOException {
        return UserGroupInformation.createProxyUser(user, UserGroupInformation.getLoginUser());
    }

    /** Disables Hadoop's own FS cache; this service does its own per-user caching. */
    protected void setRequiredServiceHadoopConf(Configuration conf) {
        conf.set("fs.hdfs.impl.disable.cache", "true");
    }

    // Conf key used to remember which user a FileSystem was created for, so
    // closeFileSystem() can find the matching cache entry.
    private static final String HTTPFS_FS_USER = "httpfs.fs.user";

    /**
     * Obtains a (cached) FileSystem for the current UGI's short user name.
     * putIfAbsent() makes the cache insertion race-free: the loser of the race
     * simply uses the winner's entry.
     */
    protected FileSystem createFileSystem(Configuration namenodeConf)
        throws IOException {
        String user = UserGroupInformation.getCurrentUser().getShortUserName();
        CachedFileSystem newCachedFS = new CachedFileSystem(purgeTimeout);
        CachedFileSystem cachedFS = fsCache.putIfAbsent(user, newCachedFS);
        if (cachedFS == null) {
            cachedFS = newCachedFS;
        }
        Configuration conf = new Configuration(namenodeConf);
        conf.set(HTTPFS_FS_USER, user);
        return cachedFS.getFileSystem(conf);
    }

    /** Releases the lease taken by {@link #createFileSystem(Configuration)} for this fs' user. */
    protected void closeFileSystem(FileSystem fs) throws IOException {
        if (fsCache.containsKey(fs.getConf().get(HTTPFS_FS_USER))) {
            fsCache.get(fs.getConf().get(HTTPFS_FS_USER)).release();
        }
    }

    /**
     * Rejects namenodes not present in the (case-insensitive) whitelist.
     * An empty whitelist or a "*" entry allows everything.
     */
    protected void validateNamenode(String namenode) throws FileSystemAccessException {
        if (nameNodeWhitelist.size() > 0 && !nameNodeWhitelist.contains("*")) {
            if (!nameNodeWhitelist.contains(namenode.toLowerCase())) {
                throw new FileSystemAccessException(FileSystemAccessException.ERROR.H05, namenode, "not in whitelist");
            }
        }
    }

    /** Hook for subclasses; the base implementation performs no health check. */
    protected void checkNameNodeHealth(FileSystem fileSystem) throws FileSystemAccessException {
    }

    /**
     * Runs {@code executor} as {@code user} against a managed FileSystem,
     * timing it via instrumentation and always releasing the filesystem.
     *
     * @throws FileSystemAccessException if the conf did not come from this
     *         service (H04), has no default FS (H06), or the execution fails (H03).
     */
    @Override
    public <T> T execute(String user, final Configuration conf, final FileSystemExecutor<T> executor)
        throws FileSystemAccessException {
        Check.notEmpty(user, "user");
        Check.notNull(conf, "conf");
        Check.notNull(executor, "executor");
        if (!conf.getBoolean(FILE_SYSTEM_SERVICE_CREATED, false)) {
            throw new FileSystemAccessException(FileSystemAccessException.ERROR.H04);
        }
        if (conf.get(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY) == null ||
            conf.getTrimmed(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY).length() == 0) {
            throw new FileSystemAccessException(FileSystemAccessException.ERROR.H06,
                                               CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY);
        }
        try {
            validateNamenode(
                new URI(conf.get(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY)).
                    getAuthority());
            UserGroupInformation ugi = getUGI(user);
            return ugi.doAs(new PrivilegedExceptionAction<T>() {
                @Override
                public T run() throws Exception {
                    FileSystem fs = createFileSystem(conf);
                    Instrumentation instrumentation = getServer().get(Instrumentation.class);
                    Instrumentation.Cron cron = instrumentation.createCron();
                    try {
                        checkNameNodeHealth(fs);
                        cron.start();
                        return executor.execute(fs);
                    } finally {
                        cron.stop();
                        instrumentation.addCron(INSTRUMENTATION_GROUP, executor.getClass().getSimpleName(), cron);
                        closeFileSystem(fs);
                    }
                }
            });
        } catch (FileSystemAccessException ex) {
            throw ex;
        } catch (Exception ex) {
            throw new FileSystemAccessException(FileSystemAccessException.ERROR.H03, ex);
        }
    }

    /**
     * Creates a FileSystem for {@code user} without releasing it; the caller
     * owns the lease and must hand it back via {@link #releaseFileSystem(FileSystem)}.
     */
    public FileSystem createFileSystemInternal(String user, final Configuration conf)
        throws IOException, FileSystemAccessException {
        Check.notEmpty(user, "user");
        Check.notNull(conf, "conf");
        if (!conf.getBoolean(FILE_SYSTEM_SERVICE_CREATED, false)) {
            throw new FileSystemAccessException(FileSystemAccessException.ERROR.H04);
        }
        try {
            validateNamenode(
                new URI(conf.get(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY)).getAuthority());
            UserGroupInformation ugi = getUGI(user);
            return ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
                @Override
                public FileSystem run() throws Exception {
                    return createFileSystem(conf);
                }
            });
        } catch (IOException ex) {
            throw ex;
        } catch (FileSystemAccessException ex) {
            throw ex;
        } catch (Exception ex) {
            throw new FileSystemAccessException(FileSystemAccessException.ERROR.H08, ex.getMessage(), ex);
        }
    }

    @Override
    public FileSystem createFileSystem(String user, final Configuration conf) throws IOException,
        FileSystemAccessException {
        unmanagedFileSystems.incrementAndGet();
        return createFileSystemInternal(user, conf);
    }

    @Override
    public void releaseFileSystem(FileSystem fs) throws IOException {
        unmanagedFileSystems.decrementAndGet();
        closeFileSystem(fs);
    }

    /**
     * Returns a copy of the service Hadoop configuration, tagged with the
     * {@link #FILE_SYSTEM_SERVICE_CREATED} marker required by execute()/create().
     */
    @Override
    public Configuration getFileSystemConfiguration() {
        Configuration conf = new Configuration(true);
        ConfigurationUtils.copy(serviceHadoopConf, conf);
        conf.setBoolean(FILE_SYSTEM_SERVICE_CREATED, true);
        // Force-clear server-side umask to make HttpFS match WebHDFS behavior
        conf.set(FsPermission.UMASK_LABEL, "000");
        return conf;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.io.network.partition.consumer;
import org.apache.flink.annotation.VisibleForTesting;
import org.apache.flink.metrics.Counter;
import org.apache.flink.runtime.checkpoint.CheckpointException;
import org.apache.flink.runtime.checkpoint.channel.ChannelStateWriter;
import org.apache.flink.runtime.event.AbstractEvent;
import org.apache.flink.runtime.event.TaskEvent;
import org.apache.flink.runtime.io.network.api.CheckpointBarrier;
import org.apache.flink.runtime.io.network.api.serialization.EventSerializer;
import org.apache.flink.runtime.io.network.buffer.Buffer;
import org.apache.flink.runtime.io.network.logger.NetworkActionsLogger;
import org.apache.flink.runtime.io.network.partition.ChannelStateHolder;
import org.apache.flink.runtime.io.network.partition.ResultPartitionID;
import org.apache.flink.util.Preconditions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nullable;
import javax.annotation.concurrent.GuardedBy;
import java.io.IOException;
import java.util.ArrayDeque;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import static org.apache.flink.runtime.checkpoint.CheckpointFailureReason.CHECKPOINT_DECLINED_TASK_NOT_READY;
import static org.apache.flink.util.Preconditions.checkNotNull;
import static org.apache.flink.util.Preconditions.checkState;
/** An input channel reads recovered state from previous unaligned checkpoint snapshots. */
public abstract class RecoveredInputChannel extends InputChannel implements ChannelStateHolder {
    private static final Logger LOG = LoggerFactory.getLogger(RecoveredInputChannel.class);

    // Recovered buffers queued for consumption in FIFO order; also serves as the lock
    // guarding itself and isReleased.
    private final ArrayDeque<Buffer> receivedBuffers = new ArrayDeque<>();
    // Completed once the EndOfChannelStateEvent is read, i.e. all recovered state is consumed.
    private final CompletableFuture<?> stateConsumedFuture = new CompletableFuture<>();
    protected final BufferManager bufferManager;

    @GuardedBy("receivedBuffers")
    private boolean isReleased;

    // Set exactly once via setChannelStateWriter(); enforced by the checkState there.
    protected ChannelStateWriter channelStateWriter;

    /**
     * The buffer number of recovered buffers. Starts at MIN_VALUE to have no collisions with actual
     * buffer numbers.
     */
    private int sequenceNumber = Integer.MIN_VALUE;

    protected final int networkBuffersPerChannel;
    // Flipped on first requestBufferBlocking() so exclusive buffers are only
    // assigned when there actually is state to recover.
    private boolean exclusiveBuffersAssigned;
    // Forwarded to the real channel in toInputChannel(); -1 means no checkpoint stopped yet.
    private long lastStoppedCheckpointId = -1;

    RecoveredInputChannel(
            SingleInputGate inputGate,
            int channelIndex,
            ResultPartitionID partitionId,
            int initialBackoff,
            int maxBackoff,
            Counter numBytesIn,
            Counter numBuffersIn,
            int networkBuffersPerChannel) {
        super(
                inputGate,
                channelIndex,
                partitionId,
                initialBackoff,
                maxBackoff,
                numBytesIn,
                numBuffersIn);
        // 0 required buffers: floating buffers only until exclusive ones are requested.
        bufferManager = new BufferManager(inputGate.getMemorySegmentProvider(), this, 0);
        this.networkBuffersPerChannel = networkBuffersPerChannel;
    }

    @Override
    public void setChannelStateWriter(ChannelStateWriter channelStateWriter) {
        checkState(this.channelStateWriter == null, "Already initialized");
        this.channelStateWriter = checkNotNull(channelStateWriter);
    }

    /**
     * Converts this recovered channel into the real input channel, carrying over the
     * last stopped checkpoint id. Only legal once all recovered state has been consumed.
     */
    public final InputChannel toInputChannel() throws IOException {
        Preconditions.checkState(
                stateConsumedFuture.isDone(), "recovered state is not fully consumed");
        final InputChannel inputChannel = toInputChannelInternal();
        inputChannel.checkpointStopped(lastStoppedCheckpointId);
        return inputChannel;
    }

    @Override
    public void checkpointStopped(long checkpointId) {
        this.lastStoppedCheckpointId = checkpointId;
    }

    /** Builds the concrete (local/remote) channel this recovered channel converts into. */
    protected abstract InputChannel toInputChannelInternal() throws IOException;

    CompletableFuture<?> getStateConsumedFuture() {
        return stateConsumedFuture;
    }

    /**
     * Enqueues one buffer of recovered state. If the channel was already released,
     * the buffer is not enqueued and is recycled in the finally block instead.
     */
    public void onRecoveredStateBuffer(Buffer buffer) {
        boolean recycleBuffer = true;
        NetworkActionsLogger.traceRecover(
                "InputChannelRecoveredStateHandler#recover",
                buffer,
                inputGate.getOwningTaskName(),
                channelInfo);
        try {
            final boolean wasEmpty;
            synchronized (receivedBuffers) {
                // Similar to notifyBufferAvailable(), make sure that we never add a buffer
                // after releaseAllResources() released all buffers from receivedBuffers.
                if (isReleased) {
                    wasEmpty = false;
                } else {
                    wasEmpty = receivedBuffers.isEmpty();
                    receivedBuffers.add(buffer);
                    recycleBuffer = false;
                }
            }
            // Notify outside the lock; only the empty->non-empty transition needs it.
            if (wasEmpty) {
                notifyChannelNonEmpty();
            }
        } finally {
            if (recycleBuffer) {
                buffer.recycleBuffer();
            }
        }
    }

    /**
     * Signals the end of recovered state by enqueueing an EndOfChannelStateEvent and
     * returning the floating buffers that are no longer needed for recovery.
     */
    public void finishReadRecoveredState() throws IOException {
        onRecoveredStateBuffer(EventSerializer.toBuffer(EndOfChannelStateEvent.INSTANCE, false));
        bufferManager.releaseFloatingBuffers();
        LOG.debug("{}/{} finished recovering input.", inputGate.getOwningTaskName(), channelInfo);
    }

    /**
     * Polls the next recovered buffer. Returns null when the queue is empty or when the
     * end-of-state event is reached (which also completes stateConsumedFuture).
     */
    @Nullable
    private BufferAndAvailability getNextRecoveredStateBuffer() throws IOException {
        final Buffer next;
        final Buffer.DataType nextDataType;
        synchronized (receivedBuffers) {
            checkState(!isReleased, "Trying to read from released RecoveredInputChannel");
            next = receivedBuffers.poll();
            nextDataType = peekDataTypeUnsafe();
        }
        if (next == null) {
            return null;
        } else if (isEndOfChannelStateEvent(next)) {
            stateConsumedFuture.complete(null);
            return null;
        } else {
            return new BufferAndAvailability(next, nextDataType, 0, sequenceNumber++);
        }
    }

    private boolean isEndOfChannelStateEvent(Buffer buffer) throws IOException {
        if (buffer.isBuffer()) {
            return false;
        }
        AbstractEvent event = EventSerializer.fromBuffer(buffer, getClass().getClassLoader());
        // Deserialization advanced the reader index; rewind so the buffer stays readable.
        buffer.setReaderIndex(0);
        return event.getClass() == EndOfChannelStateEvent.class;
    }

    @Override
    Optional<BufferAndAvailability> getNextBuffer() throws IOException {
        checkError();
        return Optional.ofNullable(getNextRecoveredStateBuffer());
    }

    /** Peeks the data type of the next queued buffer; caller must hold the receivedBuffers lock. */
    private Buffer.DataType peekDataTypeUnsafe() {
        assert Thread.holdsLock(receivedBuffers);
        final Buffer first = receivedBuffers.peek();
        return first != null ? first.getDataType() : Buffer.DataType.NONE;
    }

    @Override
    public void resumeConsumption() {
        throw new UnsupportedOperationException("RecoveredInputChannel should never be blocked.");
    }

    @Override
    final void requestSubpartition(int subpartitionIndex) {
        throw new UnsupportedOperationException(
                "RecoveredInputChannel should never request partition.");
    }

    @Override
    void sendTaskEvent(TaskEvent event) {
        throw new UnsupportedOperationException(
                "RecoveredInputChannel should never send any task events.");
    }

    @Override
    boolean isReleased() {
        synchronized (receivedBuffers) {
            return isReleased;
        }
    }

    /**
     * Releases all queued buffers exactly once. Buffers are drained under the lock but
     * handed back to the buffer manager outside it to avoid lock-ordering issues.
     */
    void releaseAllResources() throws IOException {
        ArrayDeque<Buffer> releasedBuffers = new ArrayDeque<>();
        boolean shouldRelease = false;
        synchronized (receivedBuffers) {
            if (!isReleased) {
                isReleased = true;
                shouldRelease = true;
                releasedBuffers.addAll(receivedBuffers);
                receivedBuffers.clear();
            }
        }
        if (shouldRelease) {
            bufferManager.releaseAllBuffers(releasedBuffers);
        }
    }

    @VisibleForTesting
    protected int getNumberOfQueuedBuffers() {
        synchronized (receivedBuffers) {
            return receivedBuffers.size();
        }
    }

    public Buffer requestBufferBlocking() throws InterruptedException, IOException {
        // not in setup to avoid assigning buffers unnecessarily if there is no state
        if (!exclusiveBuffersAssigned) {
            bufferManager.requestExclusiveBuffers(networkBuffersPerChannel);
            exclusiveBuffersAssigned = true;
        }
        return bufferManager.requestBufferBlocking();
    }

    @Override
    public void checkpointStarted(CheckpointBarrier barrier) throws CheckpointException {
        // New checkpoints cannot be taken while the task is still recovering state.
        throw new CheckpointException(CHECKPOINT_DECLINED_TASK_NOT_READY);
    }
}
| |
/*
* Copyright LWJGL. All rights reserved.
* License terms: https://www.lwjgl.org/license
* MACHINE GENERATED FILE, DO NOT EDIT
*/
package org.lwjgl.nuklear;
import java.nio.*;
import org.lwjgl.*;
import org.lwjgl.system.*;
import static org.lwjgl.system.MemoryUtil.*;
import static org.lwjgl.system.MemoryStack.*;
/**
* <h3>Member documentation</h3>
*
* <ul>
* <li>{@code userdata} – user provided font handle</li>
* <li>{@code height} – max height of the font</li>
* <li>{@code width} – font string width in pixel callback</li>
* <li>{@code query} – font glyph callback to query drawing info</li>
* <li>{@code texture} – texture handle to the used font atlas or texture</li>
* </ul>
*
* <h3>Layout</h3>
*
* <pre><code>struct nk_user_font {
{@link NkHandle nk_handle} userdata;
float height;
nk_text_width_f width;
nk_query_font_glyph_f query;
{@link NkHandle nk_handle} texture;
}</code></pre>
*/
// NOTE(review): this class lives in a MACHINE GENERATED file (see header);
// only comments were added here — the generated code itself is untouched.
public class NkUserFont extends Struct implements NativeResource {
    /** The struct size in bytes. */
    public static final int SIZEOF;

    /** The struct alignment in bytes. */
    public static final int ALIGNOF;

    /** The struct member offsets. */
    public static final int
        USERDATA,
        HEIGHT,
        WIDTH,
        QUERY,
        TEXTURE;

    static {
        // Layout mirrors the native struct nk_user_font declared in the class javadoc:
        // nk_handle userdata; float height; two function pointers; nk_handle texture.
        Layout layout = __struct(
            __member(NkHandle.SIZEOF, NkHandle.ALIGNOF),
            __member(4),
            __member(POINTER_SIZE),
            __member(POINTER_SIZE),
            __member(NkHandle.SIZEOF, NkHandle.ALIGNOF)
        );

        SIZEOF = layout.getSize();
        ALIGNOF = layout.getAlignment();

        USERDATA = layout.offsetof(0);
        HEIGHT = layout.offsetof(1);
        WIDTH = layout.offsetof(2);
        QUERY = layout.offsetof(3);
        TEXTURE = layout.offsetof(4);
    }

    // Package-private: wraps an existing native address; container (possibly null)
    // keeps the backing ByteBuffer strongly referenced.
    NkUserFont(long address, ByteBuffer container) {
        super(address, container);
    }

    /**
     * Creates a {@link NkUserFont} instance at the current position of the specified {@link ByteBuffer} container. Changes to the buffer's content will be
     * visible to the struct instance and vice versa.
     *
     * <p>The created instance holds a strong reference to the container object.</p>
     */
    public NkUserFont(ByteBuffer container) {
        this(memAddress(container), checkContainer(container, SIZEOF));
    }

    @Override
    public int sizeof() { return SIZEOF; }

    /** Returns a {@link NkHandle} view of the {@code userdata} field. */
    public NkHandle userdata() { return nuserdata(address()); }
    /** Returns the value of the {@code height} field. */
    public float height() { return nheight(address()); }
    /** Returns the {@code NkTextWidthCallback} instance at the {@code width} field. */
    public NkTextWidthCallback width() { return NkTextWidthCallback.create(nwidth(address())); }
    /** Returns the {@code NkQueryFontGlyphCallback} instance at the {@code query} field. */
    public NkQueryFontGlyphCallback query() { return NkQueryFontGlyphCallback.create(nquery(address())); }
    /** Returns a {@link NkHandle} view of the {@code texture} field. */
    public NkHandle texture() { return ntexture(address()); }

    /** Copies the specified {@link NkHandle} to the {@code userdata} field. */
    public NkUserFont userdata(NkHandle value) { nuserdata(address(), value); return this; }
    /** Sets the specified value to the {@code height} field. */
    public NkUserFont height(float value) { nheight(address(), value); return this; }
    /** Sets the address of the specified {@link NkTextWidthCallbackI} to the {@code width} field. */
    public NkUserFont width(NkTextWidthCallbackI value) { nwidth(address(), addressSafe(value)); return this; }
    /** Sets the address of the specified {@link NkQueryFontGlyphCallbackI} to the {@code query} field. */
    public NkUserFont query(NkQueryFontGlyphCallbackI value) { nquery(address(), addressSafe(value)); return this; }
    /** Copies the specified {@link NkHandle} to the {@code texture} field. */
    public NkUserFont texture(NkHandle value) { ntexture(address(), value); return this; }

    /** Unsafe version of {@link #set(NkUserFont) set}: copies SIZEOF bytes from the given native address. */
    public NkUserFont nset(long struct) {
        memCopy(struct, address(), SIZEOF);
        return this;
    }

    /**
     * Copies the specified struct data to this struct.
     *
     * @param src the source struct
     *
     * @return this struct
     */
    public NkUserFont set(NkUserFont src) {
        return nset(src.address());
    }

    // ----------------------------------- heap/native allocation factories

    /** Returns a new {@link NkUserFont} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed. */
    public static NkUserFont malloc() {
        return create(nmemAlloc(SIZEOF));
    }

    /** Returns a new {@link NkUserFont} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed. */
    public static NkUserFont calloc() {
        return create(nmemCalloc(1, SIZEOF));
    }

    /** Returns a new {@link NkUserFont} instance allocated with {@link BufferUtils}. */
    public static NkUserFont create() {
        return new NkUserFont(BufferUtils.createByteBuffer(SIZEOF));
    }

    /** Returns a new {@link NkUserFont} instance for the specified memory address or {@code null} if the address is {@code NULL}. */
    public static NkUserFont create(long address) {
        return address == NULL ? null : new NkUserFont(address, null);
    }

    /**
     * Returns a new {@link NkUserFont.Buffer} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed.
     *
     * @param capacity the buffer capacity
     */
    public static Buffer malloc(int capacity) {
        return create(nmemAlloc(capacity * SIZEOF), capacity);
    }

    /**
     * Returns a new {@link NkUserFont.Buffer} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed.
     *
     * @param capacity the buffer capacity
     */
    public static Buffer calloc(int capacity) {
        return create(nmemCalloc(capacity, SIZEOF), capacity);
    }

    /**
     * Returns a new {@link NkUserFont.Buffer} instance allocated with {@link BufferUtils}.
     *
     * @param capacity the buffer capacity
     */
    public static Buffer create(int capacity) {
        return new Buffer(BufferUtils.createByteBuffer(capacity * SIZEOF));
    }

    /**
     * Create a {@link NkUserFont.Buffer} instance at the specified memory.
     *
     * @param address  the memory address
     * @param capacity the buffer capacity
     */
    public static Buffer create(long address, int capacity) {
        return address == NULL ? null : new Buffer(address, null, -1, 0, capacity, capacity);
    }

    // ----------------------------------- stack allocation factories

    /** Returns a new {@link NkUserFont} instance allocated on the thread-local {@link MemoryStack}. */
    public static NkUserFont mallocStack() {
        return mallocStack(stackGet());
    }

    /** Returns a new {@link NkUserFont} instance allocated on the thread-local {@link MemoryStack} and initializes all its bits to zero. */
    public static NkUserFont callocStack() {
        return callocStack(stackGet());
    }

    /**
     * Returns a new {@link NkUserFont} instance allocated on the specified {@link MemoryStack}.
     *
     * @param stack the stack from which to allocate
     */
    public static NkUserFont mallocStack(MemoryStack stack) {
        return create(stack.nmalloc(ALIGNOF, SIZEOF));
    }

    /**
     * Returns a new {@link NkUserFont} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
     *
     * @param stack the stack from which to allocate
     */
    public static NkUserFont callocStack(MemoryStack stack) {
        return create(stack.ncalloc(ALIGNOF, 1, SIZEOF));
    }

    /**
     * Returns a new {@link NkUserFont.Buffer} instance allocated on the thread-local {@link MemoryStack}.
     *
     * @param capacity the buffer capacity
     */
    public static Buffer mallocStack(int capacity) {
        return mallocStack(capacity, stackGet());
    }

    /**
     * Returns a new {@link NkUserFont.Buffer} instance allocated on the thread-local {@link MemoryStack} and initializes all its bits to zero.
     *
     * @param capacity the buffer capacity
     */
    public static Buffer callocStack(int capacity) {
        return callocStack(capacity, stackGet());
    }

    /**
     * Returns a new {@link NkUserFont.Buffer} instance allocated on the specified {@link MemoryStack}.
     *
     * @param stack    the stack from which to allocate
     * @param capacity the buffer capacity
     */
    public static Buffer mallocStack(int capacity, MemoryStack stack) {
        return create(stack.nmalloc(ALIGNOF, capacity * SIZEOF), capacity);
    }

    /**
     * Returns a new {@link NkUserFont.Buffer} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
     *
     * @param stack    the stack from which to allocate
     * @param capacity the buffer capacity
     */
    public static Buffer callocStack(int capacity, MemoryStack stack) {
        return create(stack.ncalloc(ALIGNOF, capacity, SIZEOF), capacity);
    }

    // ----------------------------------- unsafe (raw address) accessors

    /** Unsafe version of {@link #userdata}. */
    public static NkHandle nuserdata(long struct) { return NkHandle.create(struct + NkUserFont.USERDATA); }
    /** Unsafe version of {@link #height}. */
    public static float nheight(long struct) { return memGetFloat(struct + NkUserFont.HEIGHT); }
    /** Unsafe version of {@link #width}. */
    public static long nwidth(long struct) { return memGetAddress(struct + NkUserFont.WIDTH); }
    /** Unsafe version of {@link #query}. */
    public static long nquery(long struct) { return memGetAddress(struct + NkUserFont.QUERY); }
    /** Unsafe version of {@link #texture}. */
    public static NkHandle ntexture(long struct) { return NkHandle.create(struct + NkUserFont.TEXTURE); }

    /** Unsafe version of {@link #userdata(NkHandle) userdata}. */
    public static void nuserdata(long struct, NkHandle value) { memCopy(value.address(), struct + NkUserFont.USERDATA, NkHandle.SIZEOF); }
    /** Unsafe version of {@link #height(float) height}. */
    public static void nheight(long struct, float value) { memPutFloat(struct + NkUserFont.HEIGHT, value); }
    /** Unsafe version of {@link #width(NkTextWidthCallbackI) width}. */
    public static void nwidth(long struct, long value) { memPutAddress(struct + NkUserFont.WIDTH, value); }
    /** Unsafe version of {@link #query(NkQueryFontGlyphCallbackI) query}. */
    public static void nquery(long struct, long value) { memPutAddress(struct + NkUserFont.QUERY, value); }
    /** Unsafe version of {@link #texture(NkHandle) texture}. */
    public static void ntexture(long struct, NkHandle value) { memCopy(value.address(), struct + NkUserFont.TEXTURE, NkHandle.SIZEOF); }

    // -----------------------------------

    /** An array of {@link NkUserFont} structs. */
    public static class Buffer extends StructBuffer<NkUserFont, Buffer> implements NativeResource {
        /**
         * Creates a new {@link NkUserFont.Buffer} instance backed by the specified container.
         *
         * Changes to the container's content will be visible to the struct buffer instance and vice versa. The two buffers' position, limit, and mark values
         * will be independent. The new buffer's position will be zero, its capacity and its limit will be the number of bytes remaining in this buffer divided
         * by {@link NkUserFont#SIZEOF}, and its mark will be undefined.
         *
         * <p>The created buffer instance holds a strong reference to the container object.</p>
         */
        public Buffer(ByteBuffer container) {
            super(container, container.remaining() / SIZEOF);
        }

        // Package-private: raw-address constructor used by the create()/slice machinery.
        Buffer(long address, ByteBuffer container, int mark, int pos, int lim, int cap) {
            super(address, container, mark, pos, lim, cap);
        }

        @Override
        protected Buffer self() {
            return this;
        }

        @Override
        protected Buffer newBufferInstance(long address, ByteBuffer container, int mark, int pos, int lim, int cap) {
            return new Buffer(address, container, mark, pos, lim, cap);
        }

        @Override
        protected NkUserFont newInstance(long address) {
            return new NkUserFont(address, container);
        }

        @Override
        protected int sizeof() {
            return SIZEOF;
        }

        /** Returns a {@link NkHandle} view of the {@code userdata} field. */
        public NkHandle userdata() { return NkUserFont.nuserdata(address()); }
        /** Returns the value of the {@code height} field. */
        public float height() { return NkUserFont.nheight(address()); }
        /** Returns the {@code NkTextWidthCallback} instance at the {@code width} field. */
        public NkTextWidthCallback width() { return NkTextWidthCallback.create(NkUserFont.nwidth(address())); }
        /** Returns the {@code NkQueryFontGlyphCallback} instance at the {@code query} field. */
        public NkQueryFontGlyphCallback query() { return NkQueryFontGlyphCallback.create(NkUserFont.nquery(address())); }
        /** Returns a {@link NkHandle} view of the {@code texture} field. */
        public NkHandle texture() { return NkUserFont.ntexture(address()); }

        /** Copies the specified {@link NkHandle} to the {@code userdata} field. */
        public NkUserFont.Buffer userdata(NkHandle value) { NkUserFont.nuserdata(address(), value); return this; }
        /** Sets the specified value to the {@code height} field. */
        public NkUserFont.Buffer height(float value) { NkUserFont.nheight(address(), value); return this; }
        /** Sets the address of the specified {@link NkTextWidthCallbackI} to the {@code width} field. */
        public NkUserFont.Buffer width(NkTextWidthCallbackI value) { NkUserFont.nwidth(address(), addressSafe(value)); return this; }
        /** Sets the address of the specified {@link NkQueryFontGlyphCallbackI} to the {@code query} field. */
        public NkUserFont.Buffer query(NkQueryFontGlyphCallbackI value) { NkUserFont.nquery(address(), addressSafe(value)); return this; }
        /** Copies the specified {@link NkHandle} to the {@code texture} field. */
        public NkUserFont.Buffer texture(NkHandle value) { NkUserFont.ntexture(address(), value); return this; }
    }
}
| |
/*
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package io.tilt.minka.core.leader.distributor;
import static io.tilt.minka.domain.EntityEvent.CREATE;
import static io.tilt.minka.domain.EntityEvent.REMOVE;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import io.tilt.minka.api.Config;
import io.tilt.minka.core.leader.balancer.Balancer;
import io.tilt.minka.core.leader.balancer.Spot;
import io.tilt.minka.core.leader.data.CommittedState;
import io.tilt.minka.core.leader.data.DirtyState;
import io.tilt.minka.core.leader.data.Scheme;
import io.tilt.minka.core.task.LeaderAware;
import io.tilt.minka.domain.EntityEvent;
import io.tilt.minka.domain.EntityState;
import io.tilt.minka.domain.Heartbeat;
import io.tilt.minka.domain.ShardEntity;
import io.tilt.minka.model.Duty;
import io.tilt.minka.model.Pallet;
import io.tilt.minka.shard.ShardCapacity;
import io.tilt.minka.shard.ShardState;
import io.tilt.minka.utils.LogUtils;
/**
* Factory for {@linkplain ChangePlan} instances when neccesary.
* Calls the configred {@linkplain Balancer} for each group of duties of the same {@linkplain Pallet}
* Balancers use a {@linkplain Migrator} to plan its shippings, which are later written to the plan.
*
* @author Cristian Gonzalez
* @since Ene 4, 2015
*/
class ChangePlanFactory {

    private static final Logger logger = LoggerFactory.getLogger(ChangePlanFactory.class);

    private final Config config;
    private final String name = getClass().getSimpleName();
    private final LeaderAware aware;

    ChangePlanFactory(final Config config, final LeaderAware aware) {
        this.config = config;
        this.aware = aware;
    }

    /** @return a plan if there're changes to apply or NULL if not */
    ChangePlan create(final Scheme scheme, final ChangePlan previous) {
        final long now = System.currentTimeMillis();
        // freeze the dirty state so its contents are stable while the plan is compiled
        final DirtyState snapshot = scheme.getDirty().snapshot();
        ChangePlan plan = new ChangePlan(
                config.beatToMs(config.getDistributor().getPlanExpiration()),
                config.getDistributor().getPlanMaxRetries(),
                previous==null ? 0 : previous.getVersion());
        // to record initial pid and detect lazy surviving followers
        if (scheme.getCurrentPlan() == null && previous == null) {
            scheme.setFirstPlanId(plan.getId());
        }
        final boolean any = detectApplyFeatures(previous, plan, snapshot, scheme.getCommitedState());
        final DirtyCompiler compiler = new DirtyCompiler(scheme.getCommitedState(), previous, plan, snapshot);
        // collect every committed duty plus the newly created ones, grouped by pallet
        final Set<ShardEntity> ents = new HashSet<>();
        scheme.getCommitedState().findDuties(ents::add);
        ents.addAll(compiler.getCreations());
        final Map<String, List<ShardEntity>> schemeByPallets = ents.stream()
                .collect(Collectors.groupingBy(e -> e.getDuty().getPalletId()));
        if (schemeByPallets.isEmpty()) {
            logger.warn("{}: CommittedState and DirtyState are empty. Nothing to balance (C:{}, R:{})",
                    name, compiler.getCreations().size(), compiler.getDeletions().size());
            plan = null;
        } else {
            if (!build(scheme, plan, compiler, schemeByPallets)) {
                plan = null;
            } else if (!any) {
                // no explicit feature triggered this plan: it is a plain rebalance
                plan.addFeature(ChangeFeature.REBALANCE);
            }
        }
        scheme.getDirty().dropSnapshotToRunning();
        logger.info("Factory: {}", System.currentTimeMillis() - now);
        return plan;
    }

    /**
     * Collects {@linkplain ChangeFeature} flags onto the plan from the snapshot, the previous
     * plan's heartbeats and the disturbance sets. For monitoring only - has no impact.
     *
     * @return true when at least one feature was added to the plan
     */
    private boolean detectApplyFeatures(
            final ChangePlan prev,
            final ChangePlan plan,
            final DirtyState snapshot,
            final CommittedState state) {

        // single-element array so the flag can be mutated from within the lambda below
        boolean[] any = {false};
        if (snapshot.commitRequestsSize()>0) {
            any[0] |= plan.addFeature(ChangeFeature.COMMIT_REQUEST);
        }
        if (snapshot.isLimitedPromotion()) {
            any[0] |= plan.addFeature(ChangeFeature.LIMITED_PROMOTION);
        }
        // care only features appeared on HBs out of last plan
        if (prev!=null) {
            final long since = prev.getCreation().toEpochMilli();
            state.findShards(null, shard-> {
                for (final Heartbeat hb: shard.getHeartbeats().values()) {
                    if (hb.getReception().isAfter(since) && hb.getFeature()!=null) {
                        any[0] |= plan.addFeature(hb.getFeature());
                    }
                }
            });
        }
        for (ChangeFeature f: snapshot.getFeatures()) {
            any[0] |= plan.addFeature(f);
        }
        if (!snapshot.getDisturbance(EntityState.DANGLING).isEmpty()) {
            any[0] |= plan.addFeature(ChangeFeature.RESTORE_DANGLING);
        }
        if (!snapshot.getDisturbance(EntityState.MISSING).isEmpty()) {
            any[0] |= plan.addFeature(ChangeFeature.RESTORE_MISSING);
        }
        return any[0];
    }

    /**
     * Runs the configured balancer for each pallet group and writes the resulting
     * migrations (and replication events) into the plan.
     *
     * @return false when nothing changed or when any balancer threw, cancelling the plan
     */
    private boolean build(
            final Scheme scheme, final ChangePlan changePlan,
            final DirtyCompiler compiler,
            final Map<String, List<ShardEntity>> schemeByPallets) {
        try {
            boolean changes = false;
            final Replicator replicator = new Replicator(aware.getLeaderShardId(), scheme);
            for (final Map.Entry<String, List<ShardEntity>> e : schemeByPallets.entrySet()) {
                final Pallet pallet = scheme.getCommitedState().getPalletById(e.getKey()).getPallet();
                final Balancer balancer = Balancer.Directory.getByStrategy(pallet.getMetadata().getBalancer());
                logStatus(scheme, compiler.getCreations(), compiler.getDeletions(), e.getValue(), pallet, balancer);
                if (balancer != null) {
                    final Migrator migra = balance(scheme, pallet, balancer, compiler.getCreations(), compiler.getDeletions());
                    changes |= migra.write(changePlan);
                    if (replicator.write(changePlan, compiler, pallet)) {
                        changes = true;
                        changePlan.addFeature(ChangeFeature.REPLICATION_EVENTS);
                    }
                } else {
                    logger.warn("{}: Balancer not found ! {} set on Pallet: {} (curr size:{}) ", name,
                            pallet.getMetadata().getBalancer(), pallet, Balancer.Directory.getAll().size());
                }
            }
            // only when everything went well otherwise'd be lost
            scheme.getDirty().cleanAllocatedDisturbance(EntityState.DANGLING, null);
            scheme.getDirty().cleanAllocatedDisturbance(EntityState.MISSING, null);
            if (!changes && compiler.getDeletions().isEmpty()) {
                return false;
            }
        } catch (Exception e) {
            logger.error("{}: Cancelling ChangePlan building", name, e);
            return false;
        }
        return true;
    }

    /**
     * Feeds the balancer with the current distribution of the pallet's duties plus the
     * pending creations/removals, and returns the {@linkplain Migrator} it filled.
     */
    private static Migrator balance(
            final Scheme partition,
            final Pallet pallet,
            final Balancer balancer,
            final Set<ShardEntity> dutyCreations,
            final Set<ShardEntity> dutyDeletions) {

        final Set<ShardEntity> removes = dutyDeletions.stream()
                .filter(d -> d.getDuty().getPalletId().equals(pallet.getId()))
                .collect(Collectors.toSet());
        final Set<ShardEntity> adds = dutyCreations.stream()
                .filter(d -> d.getDuty().getPalletId().equals(pallet.getId()))
                .collect(Collectors.toSet());

        final Set<ShardEntity> sourceRefs = new HashSet<>(removes.size() + adds.size());
        final Map<Spot, Set<Duty>> scheme = new TreeMap<>();

        // add the currently distributed duties
        partition.getCommitedState().findShards(ShardState.ONLINE.filter(), shard-> {
            final Set<Duty> located = new HashSet<>();
            partition.getCommitedState().findDuties(shard, pallet, d-> {
                located.add(d.getDuty());
                sourceRefs.add(d);
            });
            scheme.put(new Spot(shard), located);
        });

        // include pending creations and removals so the migrator can resolve their references
        sourceRefs.addAll(removes);
        sourceRefs.addAll(adds);
        final Migrator migrator = new Migrator(partition, pallet, sourceRefs);
        final Map<EntityEvent, Set<Duty>> stage = new HashMap<>(2);
        stage.put(CREATE, refs(adds));
        stage.put(REMOVE, refs(removes));
        balancer.balance(pallet, scheme, stage, migrator);
        return migrator;
    }

    /** @return the plain {@linkplain Duty} references held by the given entities */
    private static Set<Duty> refs(final Set<ShardEntity> entities) {
        final Set<Duty> ret = new HashSet<>(entities.size());
        entities.forEach(e -> ret.add(e.getDuty()));
        return ret;
    }

    /** Logs the pallet's capacity and pending add/remove sets. Tolerates a null balancer. */
    private void logStatus(
            final Scheme partition,
            final Set<ShardEntity> dutyCreations,
            final Set<ShardEntity> dutyDeletions,
            final List<ShardEntity> duties,
            final Pallet pallet,
            final Balancer balancer) {

        if (!logger.isInfoEnabled()) {
            return;
        }

        // build() invokes this before its null check on balancer: guard to avoid an NPE here
        final String balancerName = balancer == null ? "null" : balancer.getClass().getSimpleName();
        logger.info(LogUtils.titleLine(LogUtils.HYPHEN_CHAR, "Building Pallet: %s for %s", pallet.getId(),
                balancerName));
        final double[] clusterCapacity = new double[1];
        partition.getCommitedState().findShards(ShardState.ONLINE.filter(), node-> {
            final ShardCapacity cap = node.getCapacities().get(pallet);
            final double currTotal = cap == null ? 0 :  cap.getTotal();
            logger.info("{}: Capacity Shard {} : {}", name, node.toString(), currTotal);
            clusterCapacity[0] += currTotal;
        });
        // log the accumulated value, not the double[] holder
        logger.info("{}: Cluster capacity: {}", name, clusterCapacity[0]);
        logger.info("{}: Living #{}", name,
                new CommittedState.SchemeExtractor(partition.getCommitedState()).getAccountConfirmed(pallet));

        final Set<ShardEntity> creapa = dutyCreations.stream()
                .filter(d->d.getDuty().getPalletId().equals(pallet.getId()))
                .collect(Collectors.toSet());
        if (!creapa.isEmpty()) {
            logger.info("{}: Add +{}: {}", name, creapa.size(), ShardEntity.toStringIds(creapa));
        }
        final Set<ShardEntity> delepa = dutyDeletions.stream()
                .filter(d->d.getDuty().getPalletId().equals(pallet.getId()))
                .collect(Collectors.toSet());
        if (!delepa.isEmpty()) {
            logger.info("{}: Rem -{}: {}", name, delepa.size(), ShardEntity.toStringIds(delepa));
        }
    }

}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.bulk;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.CompositeIndicesRequest;
import org.elasticsearch.action.DocWriteRequest;
import org.elasticsearch.action.IndicesRequest;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.support.ActiveShardCount;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.action.support.replication.ReplicationRequest;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.lucene.uid.Versions;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContent;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
import static org.elasticsearch.action.ValidateActions.addValidationError;
/**
* A bulk request holds an ordered {@link IndexRequest}s, {@link DeleteRequest}s and {@link UpdateRequest}s
* and allows to executes it in a single batch.
*
* Note that we only support refresh on the bulk request not per item.
* @see org.elasticsearch.client.Client#bulk(BulkRequest)
*/
public class BulkRequest extends ActionRequest implements CompositeIndicesRequest, WriteRequest<BulkRequest> {
    private static final DeprecationLogger DEPRECATION_LOGGER =
        new DeprecationLogger(Loggers.getLogger(BulkRequest.class));

    // flat per-item cost added to sizeInBytes to account for action/metadata framing
    private static final int REQUEST_OVERHEAD = 50;

    /**
     * Requests that are part of this request. It is only possible to add things that are both {@link ActionRequest}s and
     * {@link WriteRequest}s to this but java doesn't support syntax to declare that everything in the array has both types so we declare
     * the one with the least casts.
     */
    final List<DocWriteRequest> requests = new ArrayList<>();
    // lazily allocated: stays null until the first non-null payload is added (see addPayload)
    List<Object> payloads = null;

    protected TimeValue timeout = BulkShardRequest.DEFAULT_TIMEOUT;
    private ActiveShardCount waitForActiveShards = ActiveShardCount.DEFAULT;
    private RefreshPolicy refreshPolicy = RefreshPolicy.NONE;

    // running estimate of this bulk's payload size; see estimatedSizeInBytes()
    private long sizeInBytes = 0;

    public BulkRequest() {
    }

    /**
     * Adds a list of requests to be executed. Either index or delete requests.
     */
    public BulkRequest add(DocWriteRequest... requests) {
        for (DocWriteRequest request : requests) {
            add(request, null);
        }
        return this;
    }

    public BulkRequest add(DocWriteRequest request) {
        return add(request, null);
    }

    /**
     * Add a request to the current BulkRequest.
     * @param request Request to add
     * @param payload Optional payload
     * @return the current bulk request
     */
    public BulkRequest add(DocWriteRequest request, @Nullable Object payload) {
        // dispatch to the type-specific overload; anything else is unsupported
        if (request instanceof IndexRequest) {
            add((IndexRequest) request, payload);
        } else if (request instanceof DeleteRequest) {
            add((DeleteRequest) request, payload);
        } else if (request instanceof UpdateRequest) {
            add((UpdateRequest) request, payload);
        } else {
            throw new IllegalArgumentException("No support for request [" + request + "]");
        }
        return this;
    }

    /**
     * Adds a list of requests to be executed. Either index or delete requests.
     */
    public BulkRequest add(Iterable<DocWriteRequest> requests) {
        for (DocWriteRequest request : requests) {
            add(request);
        }
        return this;
    }

    /**
     * Adds an {@link IndexRequest} to the list of actions to execute. Follows the same behavior of {@link IndexRequest}
     * (for example, if no id is provided, one will be generated, or usage of the create flag).
     */
    public BulkRequest add(IndexRequest request) {
        return internalAdd(request, null);
    }

    public BulkRequest add(IndexRequest request, @Nullable Object payload) {
        return internalAdd(request, payload);
    }

    /** Appends the index request and accounts its source length plus overhead into the size estimate. */
    BulkRequest internalAdd(IndexRequest request, @Nullable Object payload) {
        Objects.requireNonNull(request, "'request' must not be null");
        requests.add(request);
        addPayload(payload);
        // lack of source is validated in validate() method
        sizeInBytes += (request.source() != null ? request.source().length() : 0) + REQUEST_OVERHEAD;
        return this;
    }

    /**
     * Adds an {@link UpdateRequest} to the list of actions to execute.
     */
    public BulkRequest add(UpdateRequest request) {
        return internalAdd(request, null);
    }

    public BulkRequest add(UpdateRequest request, @Nullable Object payload) {
        return internalAdd(request, payload);
    }

    /** Appends the update request and accounts doc, upsert and script sizes into the size estimate. */
    BulkRequest internalAdd(UpdateRequest request, @Nullable Object payload) {
        Objects.requireNonNull(request, "'request' must not be null");
        requests.add(request);
        addPayload(payload);
        if (request.doc() != null) {
            sizeInBytes += request.doc().source().length();
        }
        if (request.upsertRequest() != null) {
            sizeInBytes += request.upsertRequest().source().length();
        }
        if (request.script() != null) {
            sizeInBytes += request.script().getIdOrCode().length() * 2;
        }
        return this;
    }

    /**
     * Adds an {@link DeleteRequest} to the list of actions to execute.
     */
    public BulkRequest add(DeleteRequest request) {
        return add(request, null);
    }

    public BulkRequest add(DeleteRequest request, @Nullable Object payload) {
        Objects.requireNonNull(request, "'request' must not be null");
        requests.add(request);
        addPayload(payload);
        sizeInBytes += REQUEST_OVERHEAD;
        return this;
    }

    /**
     * Records the payload for the request that was just appended to {@link #requests},
     * keeping {@link #payloads} index-aligned with it. Allocation is deferred until the
     * first non-null payload arrives.
     */
    private void addPayload(Object payload) {
        if (payloads == null) {
            if (payload == null) {
                return;
            }
            payloads = new ArrayList<>(requests.size() + 10);
            // add requests#size-1 elements to the payloads if it null (we add for an *existing* request)
            for (int i = 1; i < requests.size(); i++) {
                payloads.add(null);
            }
        }
        payloads.add(payload);
    }

    /**
     * The list of requests in this bulk request.
     */
    public List<DocWriteRequest> requests() {
        return this.requests;
    }

    @Override
    public List<? extends IndicesRequest> subRequests() {
        return requests.stream().collect(Collectors.toList());
    }

    /**
     * The list of optional payloads associated with requests in the same order as the requests. Note, elements within
     * it might be null if no payload has been provided.
     * <p>
     * Note, if no payloads have been provided, this method will return null (as to conserve memory overhead).
     */
    @Nullable
    public List<Object> payloads() {
        return this.payloads;
    }

    /**
     * The number of actions in the bulk request.
     */
    public int numberOfActions() {
        return requests.size();
    }

    /**
     * The estimated size in bytes of the bulk request.
     */
    public long estimatedSizeInBytes() {
        return sizeInBytes;
    }

    /**
     * Adds a framed data in binary format
     */
    public BulkRequest add(byte[] data, int from, int length) throws IOException {
        return add(data, from, length, null, null);
    }

    /**
     * Adds a framed data in binary format
     */
    public BulkRequest add(byte[] data, int from, int length, @Nullable String defaultIndex, @Nullable String defaultType) throws IOException {
        return add(new BytesArray(data, from, length), defaultIndex, defaultType);
    }

    /**
     * Adds a framed data in binary format
     */
    public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType) throws IOException {
        return add(data, defaultIndex, defaultType, null, null, null, null, null, true);
    }

    /**
     * Adds a framed data in binary format
     */
    public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType, boolean allowExplicitIndex) throws IOException {
        return add(data, defaultIndex, defaultType, null, null, null, null, null, allowExplicitIndex);
    }

    /**
     * Parses bulk-formatted data (newline-delimited action/metadata and source lines, using the
     * content type's stream separator) and adds the resulting index/create/update/delete requests.
     * The {@code default*} arguments fill in metadata fields absent from the action line.
     */
    public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType, @Nullable String defaultRouting, @Nullable String[] defaultFields, @Nullable FetchSourceContext defaultFetchSourceContext, @Nullable String defaultPipeline, @Nullable Object payload, boolean allowExplicitIndex) throws IOException {
        XContent xContent = XContentFactory.xContent(data);
        int line = 0;
        int from = 0;
        int length = data.length();
        byte marker = xContent.streamSeparator();
        while (true) {
            // each iteration consumes one action/metadata line (and, for non-delete, its source line)
            int nextMarker = findNextMarker(marker, from, data, length);
            if (nextMarker == -1) {
                break;
            }
            line++;

            // now parse the action
            try (XContentParser parser = xContent.createParser(data.slice(from, nextMarker - from))) {
                // move pointers
                from = nextMarker + 1;

                // Move to START_OBJECT
                XContentParser.Token token = parser.nextToken();
                if (token == null) {
                    continue;
                }
                assert token == XContentParser.Token.START_OBJECT;
                // Move to FIELD_NAME, that's the action
                token = parser.nextToken();
                assert token == XContentParser.Token.FIELD_NAME;
                String action = parser.currentName();

                // per-item metadata, seeded from the method's defaults
                String index = defaultIndex;
                String type = defaultType;
                String id = null;
                String routing = defaultRouting;
                String parent = null;
                FetchSourceContext fetchSourceContext = defaultFetchSourceContext;
                String[] fields = defaultFields;
                String timestamp = null;
                TimeValue ttl = null;
                String opType = null;
                long version = Versions.MATCH_ANY;
                VersionType versionType = VersionType.INTERNAL;
                int retryOnConflict = 0;
                String pipeline = defaultPipeline;

                // at this stage, next token can either be END_OBJECT (and use default index and type, with auto generated id)
                // or START_OBJECT which will have another set of parameters
                token = parser.nextToken();

                if (token == XContentParser.Token.START_OBJECT) {
                    String currentFieldName = null;
                    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                        if (token == XContentParser.Token.FIELD_NAME) {
                            currentFieldName = parser.currentName();
                        } else if (token.isValue()) {
                            if ("_index".equals(currentFieldName)) {
                                if (!allowExplicitIndex) {
                                    throw new IllegalArgumentException("explicit index in bulk is not allowed");
                                }
                                index = parser.text();
                            } else if ("_type".equals(currentFieldName)) {
                                type = parser.text();
                            } else if ("_id".equals(currentFieldName)) {
                                id = parser.text();
                            } else if ("_routing".equals(currentFieldName) || "routing".equals(currentFieldName)) {
                                routing = parser.text();
                            } else if ("_parent".equals(currentFieldName) || "parent".equals(currentFieldName)) {
                                parent = parser.text();
                            } else if ("_timestamp".equals(currentFieldName) || "timestamp".equals(currentFieldName)) {
                                timestamp = parser.text();
                            } else if ("_ttl".equals(currentFieldName) || "ttl".equals(currentFieldName)) {
                                if (parser.currentToken() == XContentParser.Token.VALUE_STRING) {
                                    ttl = TimeValue.parseTimeValue(parser.text(), null, currentFieldName);
                                } else {
                                    ttl = new TimeValue(parser.longValue());
                                }
                            } else if ("op_type".equals(currentFieldName) || "opType".equals(currentFieldName)) {
                                opType = parser.text();
                            } else if ("_version".equals(currentFieldName) || "version".equals(currentFieldName)) {
                                version = parser.longValue();
                            } else if ("_version_type".equals(currentFieldName) || "_versionType".equals(currentFieldName) || "version_type".equals(currentFieldName) || "versionType".equals(currentFieldName)) {
                                versionType = VersionType.fromString(parser.text());
                            } else if ("_retry_on_conflict".equals(currentFieldName) || "_retryOnConflict".equals(currentFieldName)) {
                                retryOnConflict = parser.intValue();
                            } else if ("pipeline".equals(currentFieldName)) {
                                pipeline = parser.text();
                            } else if ("fields".equals(currentFieldName)) {
                                throw new IllegalArgumentException("Action/metadata line [" + line + "] contains a simple value for parameter [fields] while a list is expected");
                            } else if ("_source".equals(currentFieldName)) {
                                fetchSourceContext = FetchSourceContext.parse(parser);
                            } else {
                                throw new IllegalArgumentException("Action/metadata line [" + line + "] contains an unknown parameter [" + currentFieldName + "]");
                            }
                        } else if (token == XContentParser.Token.START_ARRAY) {
                            if ("fields".equals(currentFieldName)) {
                                DEPRECATION_LOGGER.deprecated("Deprecated field [fields] used, expected [_source] instead");
                                List<Object> values = parser.list();
                                fields = values.toArray(new String[values.size()]);
                            } else {
                                throw new IllegalArgumentException("Malformed action/metadata line [" + line + "], expected a simple value for field [" + currentFieldName + "] but found [" + token + "]");
                            }
                        } else if (token == XContentParser.Token.START_OBJECT && "_source".equals(currentFieldName)) {
                            fetchSourceContext = FetchSourceContext.parse(parser);
                        } else if (token != XContentParser.Token.VALUE_NULL) {
                            throw new IllegalArgumentException("Malformed action/metadata line [" + line + "], expected a simple value for field [" + currentFieldName + "] but found [" + token + "]");
                        }
                    }
                } else if (token != XContentParser.Token.END_OBJECT) {
                    throw new IllegalArgumentException("Malformed action/metadata line [" + line + "], expected " + XContentParser.Token.START_OBJECT
                            + " or " + XContentParser.Token.END_OBJECT + " but found [" + token + "]");
                }

                if ("delete".equals(action)) {
                    // delete has no source line: build the request from metadata alone
                    add(new DeleteRequest(index, type, id).routing(routing).parent(parent).version(version).versionType(versionType), payload);
                } else {
                    // all other actions consume the following line as the document source
                    nextMarker = findNextMarker(marker, from, data, length);
                    if (nextMarker == -1) {
                        break;
                    }
                    line++;

                    // order is important, we set parent after routing, so routing will be set to parent if not set explicitly
                    // we use internalAdd so we don't fork here, this allows us not to copy over the big byte array to small chunks
                    // of index request.
                    if ("index".equals(action)) {
                        if (opType == null) {
                            internalAdd(new IndexRequest(index, type, id).routing(routing).parent(parent).timestamp(timestamp).ttl(ttl).version(version).versionType(versionType)
                                    .setPipeline(pipeline).source(data.slice(from, nextMarker - from)), payload);
                        } else {
                            internalAdd(new IndexRequest(index, type, id).routing(routing).parent(parent).timestamp(timestamp).ttl(ttl).version(version).versionType(versionType)
                                    .create("create".equals(opType)).setPipeline(pipeline)
                                    .source(data.slice(from, nextMarker - from)), payload);
                        }
                    } else if ("create".equals(action)) {
                        internalAdd(new IndexRequest(index, type, id).routing(routing).parent(parent).timestamp(timestamp).ttl(ttl).version(version).versionType(versionType)
                                .create(true).setPipeline(pipeline)
                                .source(data.slice(from, nextMarker - from)), payload);
                    } else if ("update".equals(action)) {
                        UpdateRequest updateRequest = new UpdateRequest(index, type, id).routing(routing).parent(parent).retryOnConflict(retryOnConflict)
                                .version(version).versionType(versionType)
                                .routing(routing)
                                .parent(parent)
                                .fromXContent(data.slice(from, nextMarker - from));
                        if (fetchSourceContext != null) {
                            updateRequest.fetchSource(fetchSourceContext);
                        }
                        if (fields != null) {
                            updateRequest.fields(fields);
                        }

                        // propagate metadata onto the embedded upsert/doc requests
                        IndexRequest upsertRequest = updateRequest.upsertRequest();
                        if (upsertRequest != null) {
                            upsertRequest.timestamp(timestamp);
                            upsertRequest.ttl(ttl);
                            upsertRequest.version(version);
                            upsertRequest.versionType(versionType);
                        }
                        IndexRequest doc = updateRequest.doc();
                        if (doc != null) {
                            doc.timestamp(timestamp);
                            doc.ttl(ttl);
                            doc.version(version);
                            doc.versionType(versionType);
                        }

                        internalAdd(updateRequest, payload);
                    }
                    // move pointers
                    from = nextMarker + 1;
                }
            }
        }
        return this;
    }

    /**
     * Sets the number of shard copies that must be active before proceeding with the write.
     * See {@link ReplicationRequest#waitForActiveShards(ActiveShardCount)} for details.
     */
    public BulkRequest waitForActiveShards(ActiveShardCount waitForActiveShards) {
        this.waitForActiveShards = waitForActiveShards;
        return this;
    }

    /**
     * A shortcut for {@link #waitForActiveShards(ActiveShardCount)} where the numerical
     * shard count is passed in, instead of having to first call {@link ActiveShardCount#from(int)}
     * to get the ActiveShardCount.
     */
    public BulkRequest waitForActiveShards(final int waitForActiveShards) {
        return waitForActiveShards(ActiveShardCount.from(waitForActiveShards));
    }

    public ActiveShardCount waitForActiveShards() {
        return this.waitForActiveShards;
    }

    @Override
    public BulkRequest setRefreshPolicy(RefreshPolicy refreshPolicy) {
        this.refreshPolicy = refreshPolicy;
        return this;
    }

    @Override
    public RefreshPolicy getRefreshPolicy() {
        return refreshPolicy;
    }

    /**
     * A timeout to wait if the index operation can't be performed immediately. Defaults to <tt>1m</tt>.
     */
    public final BulkRequest timeout(TimeValue timeout) {
        this.timeout = timeout;
        return this;
    }

    /**
     * A timeout to wait if the index operation can't be performed immediately. Defaults to <tt>1m</tt>.
     */
    public final BulkRequest timeout(String timeout) {
        return timeout(TimeValue.parseTimeValue(timeout, null, getClass().getSimpleName() + ".timeout"));
    }

    public TimeValue timeout() {
        return timeout;
    }

    /** @return the index of the next occurrence of {@code marker} in {@code [from, length)}, or -1 if absent */
    private int findNextMarker(byte marker, int from, BytesReference data, int length) {
        for (int i = from; i < length; i++) {
            if (data.get(i) == marker) {
                return i;
            }
        }
        return -1;
    }

    /**
     * @return Whether this bulk request contains index request with an ingest pipeline enabled.
     */
    public boolean hasIndexRequestsWithPipelines() {
        for (DocWriteRequest actionRequest : requests) {
            if (actionRequest instanceof IndexRequest) {
                IndexRequest indexRequest = (IndexRequest) actionRequest;
                if (Strings.hasText(indexRequest.getPipeline())) {
                    return true;
                }
            }
        }
        return false;
    }

    @Override
    public ActionRequestValidationException validate() {
        ActionRequestValidationException validationException = null;
        if (requests.isEmpty()) {
            validationException = addValidationError("no requests added", validationException);
        }
        for (DocWriteRequest request : requests) {
            // We first check if refresh has been set
            if (((WriteRequest<?>) request).getRefreshPolicy() != RefreshPolicy.NONE) {
                validationException = addValidationError(
                        "RefreshPolicy is not supported on an item request. Set it on the BulkRequest instead.", validationException);
            }
            ActionRequestValidationException ex = ((WriteRequest<?>) request).validate();
            if (ex != null) {
                if (validationException == null) {
                    validationException = new ActionRequestValidationException();
                }
                validationException.addValidationErrors(ex.validationErrors());
            }
        }
        return validationException;
    }

    // wire deserialization: must mirror writeTo's field order exactly
    @Override
    public void readFrom(StreamInput in) throws IOException {
        super.readFrom(in);
        waitForActiveShards = ActiveShardCount.readFrom(in);
        int size = in.readVInt();
        for (int i = 0; i < size; i++) {
            requests.add(DocWriteRequest.readDocumentRequest(in));
        }
        refreshPolicy = RefreshPolicy.readFrom(in);
        timeout = new TimeValue(in);
    }

    // wire serialization: must mirror readFrom's field order exactly
    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        waitForActiveShards.writeTo(out);
        out.writeVInt(requests.size());
        for (DocWriteRequest request : requests) {
            DocWriteRequest.writeDocumentRequest(out, request);
        }
        refreshPolicy.writeTo(out);
        timeout.writeTo(out);
    }
}
| |
/*
* The MIT License
*
* Copyright (c) 2004-2011, Sun Microsystems, Inc., Kohsuke Kawaguchi,
* Yahoo!, Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson.search;
import static javax.servlet.http.HttpServletResponse.SC_NOT_FOUND;
import hudson.Util;
import hudson.util.EditDistance;
import java.io.IOException;
import java.util.AbstractList;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.servlet.ServletException;
import org.kohsuke.stapler.Ancestor;
import org.kohsuke.stapler.QueryParameter;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
import org.kohsuke.stapler.export.DataWriter;
import org.kohsuke.stapler.export.Exported;
import org.kohsuke.stapler.export.ExportedBean;
import org.kohsuke.stapler.export.Flavor;
/**
* Web-bound object that provides search/navigation capability.
*
* <p>
* This object is bound to "./search" of a model object via {@link SearchableModelObject} and serves
* HTTP requests coming from JavaScript to provide search result and auto-completion.
*
* @author Kohsuke Kawaguchi
* @see SearchableModelObject
*/
public class Search {

    /**
     * Handles "./search?q=...": walks the request's ancestor chain from the
     * innermost object outward, and redirects to the first exact match found
     * in a searchable ancestor's index. If nothing matches, renders the
     * search-failed page with a 404 status.
     */
    public void doIndex(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException {
        List<Ancestor> l = req.getAncestors();
        // iterate from the innermost ancestor outward so the most specific
        // searchable object gets the first chance to resolve the query
        for( int i=l.size()-1; i>=0; i-- ) {
            Ancestor a = l.get(i);
            if (a.getObject() instanceof SearchableModelObject) {
                SearchableModelObject smo = (SearchableModelObject) a.getObject();
                if(LOGGER.isLoggable(Level.FINE)){
                    LOGGER.fine(String.format("smo.displayName=%s, searchName=%s",smo.getDisplayName(), smo.getSearchName()));
                }
                SearchIndex index = smo.getSearchIndex();
                String query = req.getParameter("q");
                if(query!=null) {
                    SuggestedItem target = find(index, query, smo);
                    if(target!=null) {
                        // found an exact match: redirect relative to the ancestor's URL
                        rsp.sendRedirect2(a.getUrl()+target.getUrl());
                        return;
                    }
                }
            }
        }
        // no exact match. show the suggestions
        rsp.setStatus(SC_NOT_FOUND);
        req.getView(this,"search-failed.jelly").forward(req,rsp);
    }

    /**
     * Used by OpenSearch auto-completion. Returns JSON array of the form:
     *
     * <pre>
     * ["queryString",["comp1","comp2",...]]
     * </pre>
     *
     * See http://developer.mozilla.org/en/docs/Supporting_search_suggestions_in_search_plugins
     */
    public void doSuggestOpenSearch(StaplerRequest req, StaplerResponse rsp, @QueryParameter String q) throws IOException, ServletException {
        rsp.setContentType(Flavor.JSON.contentType);
        DataWriter w = Flavor.JSON.createDataWriter(null, rsp);
        // outer array: [query, [suggestion paths...]]
        w.startArray();
        w.value(q);
        w.startArray();
        for (SuggestedItem item : getSuggestions(req, q))
            w.value(item.getPath());
        w.endArray();
        w.endArray();
    }

    /**
     * Used by search box auto-completion. Returns JSON array (a {@link Result}
     * bean serialized via the stapler export mechanism).
     */
    public void doSuggest(StaplerRequest req, StaplerResponse rsp, @QueryParameter String query) throws IOException, ServletException {
        Result r = new Result();
        for (SuggestedItem item : getSuggestions(req, query))
            r.suggestions.add(new Item(item.getPath()));
        rsp.serveExposedBean(req,r,Flavor.JSON);
    }

    /**
     * Gets the list of suggestions that match the given query.
     *
     * @return
     *      can be empty but never null. The size of the list is always smaller than
     *      a certain threshold to avoid showing too many options.
     */
    public SearchResult getSuggestions(StaplerRequest req, String query) {
        Set<String> paths = new HashSet<String>(); // paths already added, to control duplicates
        SearchResultImpl r = new SearchResultImpl();
        // NOTE(review): a non-numeric "max" request parameter makes parseInt
        // throw NumberFormatException here — consider validating the input.
        int max = req.hasParameter("max") ? Integer.parseInt(req.getParameter("max")) : 20;
        SearchableModelObject smo = findClosestSearchableModelObject(req);
        for (SuggestedItem i : suggest(makeSuggestIndex(req), query, smo)) {
            if(r.size()>=max) {
                // stop early and record that the result set was truncated
                r.hasMoreResults = true;
                break;
            }
            if(paths.add(i.getPath()))
                r.add(i);
        }
        return r;
    }

    /**
     * Returns the innermost ancestor of the current request that implements
     * {@link SearchableModelObject}, or null if there is none.
     */
    private SearchableModelObject findClosestSearchableModelObject(StaplerRequest req) {
        List<Ancestor> l = req.getAncestors();
        for( int i=l.size()-1; i>=0; i-- ) {
            Ancestor a = l.get(i);
            if (a.getObject() instanceof SearchableModelObject) {
                return (SearchableModelObject)a.getObject();
            }
        }
        return null;
    }

    /**
     * Creates merged search index for suggestion, combining the indices of
     * every searchable ancestor of the current request.
     */
    private SearchIndex makeSuggestIndex(StaplerRequest req) {
        SearchIndexBuilder builder = new SearchIndexBuilder();
        for (Ancestor a : req.getAncestors()) {
            if (a.getObject() instanceof SearchableModelObject) {
                SearchableModelObject smo = (SearchableModelObject) a.getObject();
                builder.add(smo.getSearchIndex());
            }
        }
        return builder.make();
    }

    // List of suggestions plus a truncation flag.
    // NOTE(review): extends ArrayList (Serializable) without a serialVersionUID.
    private static class SearchResultImpl extends ArrayList<SuggestedItem> implements SearchResult {
        private boolean hasMoreResults = false;
        public boolean hasMoreResults() {
            return hasMoreResults;
        }
    }

    /** Bean serialized to JSON for the search-box suggestion endpoint. */
    @ExportedBean
    public static class Result {
        @Exported
        public List<Item> suggestions = new ArrayList<Item>();
    }

    /** A single suggestion entry exposed to the client. */
    @ExportedBean(defaultVisibility=999)
    public static class Item {
        @Exported
        public String name;
        public Item(String name) {
            this.name = name;
        }
    }

    /**
     * Lookup strategy: FIND requires an exact token match, SUGGEST accepts
     * prefix matches. Both delegate to the corresponding SearchIndex method.
     */
    private enum Mode {
        FIND {
            void find(SearchIndex index, String token, List<SearchItem> result) {
                index.find(token, result);
            }
        },
        SUGGEST {
            void find(SearchIndex index, String token, List<SearchItem> result) {
                index.suggest(token, result);
            }
        };
        abstract void find(SearchIndex index, String token, List<SearchItem> result);
    }

    /**
     * When there are multiple suggested items, this method can narrow down the result set
     * to the SuggestedItem that has a url that contains the query. This is useful when one
     * job has a display name that matches another job's project name.
     * @param r A list of Suggested items. It is assumed that there is at least one
     * SuggestedItem in r.
     * @param query A query string
     * @return Returns the SuggestedItem which has a search url that contains the query.
     * If no SuggestedItems have a search url which contains the query, then the first
     * SuggestedItem in the List is returned.
     */
    static SuggestedItem findClosestSuggestedItem(List<SuggestedItem> r, String query) {
        for(SuggestedItem curItem : r) {
            if(LOGGER.isLoggable(Level.FINE)) {
                LOGGER.fine(String.format("item's searchUrl:%s;query=%s", curItem.item.getSearchUrl(), query));
            }
            // compare against the URL-encoded query, since search URLs are encoded
            if(curItem.item.getSearchUrl().contains(Util.rawEncode(query))) {
                return curItem;
            }
        }
        // couldn't find an item with the query in the url so just
        // return the first one
        return r.get(0);
    }

    /**
     * @deprecated Use {@link Search#find(SearchIndex, String, SearchableModelObject)} instead.
     */
    @Deprecated
    public static SuggestedItem find(SearchIndex index, String query) {
        return find(index, query, null);
    }

    /**
     * Performs a search and returns the match, or null if no match was found
     * or more than one match was found.
     * @since 1.527
     */
    public static SuggestedItem find(SearchIndex index, String query, SearchableModelObject searchContext) {
        List<SuggestedItem> r = find(Mode.FIND, index, query, searchContext);
        if(r.isEmpty()){
            return null;
        }
        else if(1==r.size()){
            return r.get(0);
        }
        else {
            // we have more than one suggested item, so return the item whose url
            // contains the query as this is probably the job's name
            return findClosestSuggestedItem(r, query);
        }
    }

    /**
     * @deprecated use {@link Search#suggest(SearchIndex, String, SearchableModelObject)} instead.
     */
    @Deprecated
    public static List<SuggestedItem> suggest(SearchIndex index, final String tokenList) {
        return suggest(index, tokenList, null);
    }

    /**
     * Returns suggestions for the given token list, ordered so that items
     * whose path starts with the query come first, then by edit distance.
     * @since 1.527
     */
    public static List<SuggestedItem> suggest(SearchIndex index, final String tokenList, SearchableModelObject searchContext) {
        // Local helper pairing each suggestion with its ranking criteria.
        class Tag implements Comparable<Tag>{
            final SuggestedItem item;
            final int distance;
            /** If the path to this suggestion starts with the token list, 1. Otherwise 0. */
            final int prefixMatch;
            Tag(SuggestedItem i) {
                item = i;
                distance = EditDistance.editDistance(i.getPath(),tokenList);
                prefixMatch = i.getPath().startsWith(tokenList)?1:0;
            }
            public int compareTo(Tag that) {
                int r = this.prefixMatch -that.prefixMatch;
                if(r!=0) return -r; // ones with head match should show up earlier
                return this.distance-that.distance;
            }
        }
        List<Tag> buf = new ArrayList<Tag>();
        List<SuggestedItem> items = find(Mode.SUGGEST, index, tokenList, searchContext);
        // sort them
        for( SuggestedItem i : items)
            buf.add(new Tag(i));
        Collections.sort(buf);
        // rebuild the original list in ranked order
        items.clear();
        for (Tag t : buf)
            items.add(t.item);
        return items;
    }

    /**
     * Splits a query string into whitespace-delimited tokens while keeping
     * each token's trailing whitespace, so adjacent tokens can be re-joined
     * verbatim into longer search terms.
     */
    static final class TokenList {
        private final String[] tokens;
        private final static String[] EMPTY = new String[0];
        public TokenList(String tokenList) {
            // split at every whitespace -> non-whitespace boundary
            // (zero-width lookbehind/lookahead, so no characters are consumed)
            tokens = tokenList!=null ? tokenList.split("(?<=\\s)(?=\\S)") : EMPTY;
        }
        public int length() { return tokens.length; }
        /**
         * Returns {@link List} such that its <tt>get(end)</tt>
         * returns the concatenation of [token_start,...,token_end]
         * (both ends inclusive.)
         */
        public List<String> subSequence(final int start) {
            return new AbstractList<String>() {
                public String get(int index) {
                    StringBuilder buf = new StringBuilder();
                    for(int i=start; i<=start+index; i++ )
                        buf.append(tokens[i]);
                    // trim the trailing whitespace kept by the split above
                    return buf.toString().trim();
                }
                public int size() {
                    return tokens.length-start;
                }
            };
        }
        public String toString() {
            StringBuilder s = new StringBuilder("TokenList{");
            for(String token : tokens) {
                s.append(token);
                s.append(",");
            }
            s.append('}');
            return s.toString();
        }
    }

    /**
     * Core search routine shared by find() and suggest(): a dynamic program
     * over the number of tokens consumed. paths[w] holds the suggested items
     * reachable by matching exactly the first w tokens, where several
     * adjacent tokens may be joined into a single search term. Only complete
     * matches — items in paths[tokens.length()] — are returned.
     */
    private static List<SuggestedItem> find(Mode m, SearchIndex index, String tokenList, SearchableModelObject searchContext) {
        TokenList tokens = new TokenList(tokenList);
        if(tokens.length()==0) return Collections.emptyList(); // no tokens given
        List<SuggestedItem>[] paths = new List[tokens.length()+1]; // we won't use [0].
        for(int i=1;i<=tokens.length();i++)
            paths[i] = new ArrayList<SuggestedItem>();
        List<SearchItem> items = new ArrayList<SearchItem>(); // items found in 1 step
        LOGGER.log(Level.FINE, "tokens={0}", tokens);
        // first token
        int w=1; // width of token
        for (String token : tokens.subSequence(0)) {
            items.clear();
            m.find(index,token,items);
            for (SearchItem si : items) {
                paths[w].add(SuggestedItem.build(searchContext ,si));
                LOGGER.log(Level.FINE, "found search item: {0}", si.getSearchName());
            }
            w++;
        }
        // successive tokens
        for (int j=1; j<tokens.length(); j++) {
            // for each length
            w=1;
            for (String token : tokens.subSequence(j)) {
                // for each candidate
                for (SuggestedItem r : paths[j]) {
                    items.clear();
                    // continue the search from the candidate item's own sub-index
                    m.find(r.item.getSearchIndex(),token,items);
                    for (SearchItem i : items)
                        paths[j+w].add(new SuggestedItem(r,i));
                }
                w++;
            }
        }
        return paths[tokens.length()];
    }

    private final static Logger LOGGER = Logger.getLogger(Search.class.getName());
}
| |
package com.compomics.util.gui.parameters.identification.search;
import com.compomics.util.examples.BareBonesBrowserLaunch;
import com.compomics.util.experiment.biology.proteins.Protein;
import com.compomics.util.experiment.biology.taxonomy.SpeciesFactory;
import com.compomics.util.experiment.identification.utils.ProteinUtils;
import com.compomics.util.experiment.io.biology.protein.FastaParameters;
import com.compomics.util.experiment.io.biology.protein.FastaSummary;
import com.compomics.util.experiment.io.biology.protein.Header;
import com.compomics.util.experiment.io.biology.protein.ProteinDatabase;
import com.compomics.util.experiment.io.biology.protein.converters.DecoyConverter;
import com.compomics.util.experiment.io.biology.protein.iterators.FastaIterator;
import com.compomics.util.gui.JOptionEditorPane;
import com.compomics.util.gui.protein.FastaParametersDialog;
import com.compomics.util.gui.waiting.waitinghandlers.ProgressDialogX;
import com.compomics.util.io.IoUtil;
import com.compomics.util.io.file.LastSelectedFolder;
import com.compomics.util.parameters.UtilitiesUserParameters;
import java.awt.Dialog;
import java.awt.Frame;
import java.awt.Image;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.Map.Entry;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.stream.Collectors;
import javax.swing.JFileChooser;
import javax.swing.JOptionPane;
import javax.swing.SpinnerListModel;
import javax.swing.filechooser.FileFilter;
/**
* This dialog displays information about a sequence database.
*
* @author Marc Vaudel
* @author Harald Barsnes
*/
public class SequenceDbDetailsDialog extends javax.swing.JDialog {
    /**
     * A simple progress dialog.
     * NOTE(review): static, so it is shared across all instances of this dialog.
     */
    private static ProgressDialogX progressDialog;
    /**
     * The last selected folder.
     */
    private LastSelectedFolder lastSelectedFolder = null;
    /**
     * Boolean indicating whether the database can be changed.
     */
    private boolean dbEditable = true;
    /**
     * The icon to display when waiting.
     */
    private Image waitingImage;
    /**
     * The normal icon. (Field name kept as "normalImange" — sic — for
     * compatibility with existing code.)
     */
    private Image normalImange;
    /**
     * The parent frame.
     */
    private Frame parentFrame;
    /**
     * The utilities user parameters.
     */
    private UtilitiesUserParameters utilitiesUserParameters;
    /**
     * The key to use to store FASTA files paths.
     */
    public static final String lastFolderKey = "fastaFile";
    /**
     * The selected FASTA file.
     */
    private String selectedFastaFile = null;
    /**
     * The parameters used to parse the FASTA file.
     */
    private FastaParameters fastaParameters = null;
    /**
     * Summary information on the FASTA file content.
     */
    private FastaSummary fastaSummary = null;
    /**
     * The batch size of proteins to sample.
     */
    public static final int SAMPLE_BATCH_SIZE = 50;
    /**
     * Accessions of the sampled proteins.
     */
    private ArrayList<String> accessionsSample = new ArrayList<>(SAMPLE_BATCH_SIZE);
    /**
     * Sample of proteins from the database, keyed by accession.
     */
    private HashMap<String, Protein> proteinsSample = new HashMap<>(SAMPLE_BATCH_SIZE);
    /**
     * Sample of protein headers from the database, keyed by accession.
     */
    private HashMap<String, Header> headersSample = new HashMap<>(SAMPLE_BATCH_SIZE);
    /**
     * A protein iterator to fill the sample.
     */
    private FastaIterator proteinIterator;
    /**
     * Boolean indicating whether the database selection was canceled by the
     * user.
     */
    private boolean canceled = false;
    /**
     * Creates a new SequenceDbDetailsDialog with a dialog as owner.
     *
     * @param owner the dialog owner
     * @param parent the parent frame
     * @param selectedFastaFile the selected FASTA file
     * @param fastaParameters the parameters used to parse the FASTA file
     * @param lastSelectedFolder the last selected folder
     * @param dbEditable if the database is editable
     * @param normalImange the normal icon
     * @param waitingImage the waiting icon
     */
    public SequenceDbDetailsDialog(
            Dialog owner,
            Frame parent,
            String selectedFastaFile,
            FastaParameters fastaParameters,
            LastSelectedFolder lastSelectedFolder,
            boolean dbEditable,
            Image normalImange,
            Image waitingImage
    ) {
        super(owner, true); // modal
        // initComponents() must run before any widget is touched below
        initComponents();
        this.parentFrame = parent;
        this.lastSelectedFolder = lastSelectedFolder;
        this.dbEditable = dbEditable;
        this.waitingImage = waitingImage;
        this.normalImange = normalImange;
        this.selectedFastaFile = selectedFastaFile;
        this.fastaParameters = fastaParameters;
        this.utilitiesUserParameters = UtilitiesUserParameters.loadUserParameters();
        editFastaParametersJLabel.setEnabled(dbEditable);
        if (this.selectedFastaFile != null) {
            // infer parsing parameters only when none were provided
            // NOTE(review): dbEditable is passed as loadFastaFile's
            // "new FASTA summary" flag — confirm this coupling is intended.
            loadFastaFile(selectedFastaFile, fastaParameters == null, dbEditable, true);
        }
        setLocationRelativeTo(owner);
    }
    /**
     * Creates a new SequenceDbDetailsDialog with a frame as owner.
     *
     * @param parent the parent frame
     * @param selectedFastaFile the selected FASTA file
     * @param fastaParameters the parameters used to parse the FASTA file
     * @param lastSelectedFolder the last selected folder
     * @param dbEditable if the database is editable
     * @param normalImange the normal icon
     * @param waitingImage the waiting icon
     */
    public SequenceDbDetailsDialog(
            Frame parent,
            String selectedFastaFile,
            FastaParameters fastaParameters,
            LastSelectedFolder lastSelectedFolder,
            boolean dbEditable,
            Image normalImange,
            Image waitingImage
    ) {
        super(parent, true); // modal
        // initComponents() must run before any widget is touched below
        initComponents();
        this.parentFrame = parent;
        this.lastSelectedFolder = lastSelectedFolder;
        this.dbEditable = dbEditable;
        this.waitingImage = waitingImage;
        this.normalImange = normalImange;
        this.selectedFastaFile = selectedFastaFile;
        this.fastaParameters = fastaParameters;
        this.utilitiesUserParameters = UtilitiesUserParameters.loadUserParameters();
        editFastaParametersJLabel.setEnabled(dbEditable);
        if (this.selectedFastaFile != null) {
            // infer parsing parameters only when none were provided
            // NOTE(review): dbEditable is passed as loadFastaFile's
            // "new FASTA summary" flag — confirm this coupling is intended.
            loadFastaFile(selectedFastaFile, fastaParameters == null, dbEditable, true);
        }
        setLocationRelativeTo(parent);
    }
    /**
     * Sets up the GUI from the current selection: file details, database name,
     * version, species, database type(s), sequence counts, and the protein
     * preview (via a fresh FASTA iterator). No-op when no file is selected.
     */
    private void setUpGUI() {
        if (selectedFastaFile != null) {
            File fastaFile = new File(selectedFastaFile);
            fileTxt.setText(selectedFastaFile);
            lastModifiedTxt.setText(new Date(fastaFile.lastModified()).toString());
            if (fastaParameters == null) {
                // no parsing parameters yet: clear derived fields
                // NOTE(review): decoyButton is enabled here regardless of
                // dbEditable, unlike the fastaSummary branch below — confirm.
                dbNameTxt.setText("");
                versionTxt.setText("");
                decoyButton.setEnabled(true);
            }
            if (fastaSummary != null) {
                dbNameTxt.setText(fastaSummary.getName());
                versionTxt.setText(fastaSummary.getVersion());
                // only offer decoy generation for editable, decoy-free databases
                decoyButton.setEnabled(!fastaSummary.containsDecoys() && dbEditable);
                // show the species present in the database
                speciesJTextField.setText(SpeciesFactory.getSpeciesDescription(fastaSummary.speciesOccurrence));
                // show the database type information
                HashMap<ProteinDatabase, Integer> databaseType = fastaSummary.databaseType;
                // the origin of the sequence information
                if (databaseType.size() == 1) {
                    ProteinDatabase proteinDatabase = databaseType.keySet().stream().findFirst().get();
                    typeJTextField.setText(proteinDatabase.getFullName());
                } else {
                    // group databases by occurrence count, sorted sets within
                    // each count, then list them from most to least frequent
                    TreeMap<Integer, TreeSet<ProteinDatabase>> occurrenceToDBMap = databaseType.entrySet().stream()
                            .collect(Collectors.groupingBy(Entry::getValue)).entrySet().stream()
                            .collect(Collectors.toMap(
                                    Entry::getKey,
                                    entry -> entry.getValue().stream()
                                            .map(entry2 -> entry2.getKey())
                                            .collect(Collectors.toCollection(TreeSet::new)),
                                    (a, b) -> {
                                        a.addAll(b);
                                        return a;
                                    },
                                    TreeMap::new));
                    String dbOccurrenceText = occurrenceToDBMap.descendingMap().values().stream()
                            .flatMap(dbs -> dbs.stream())
                            .map(db -> db.getFullName() + " (" + databaseType.get(db) + ")")
                            .collect(Collectors.joining(", "));
                    typeJTextField.setText(dbOccurrenceText);
                }
                // the number of sequences
                String nSequences = fastaSummary.nSequences + " sequences";
                if (fastaSummary.containsDecoys()) {
                    nSequences += " (" + fastaSummary.nTarget + " target)";
                }
                sizeTxt.setText(nSequences);
            } else {
                // no summary available: clear the derived fields
                speciesJTextField.setText("");
                typeJTextField.setText("");
                sizeTxt.setText("");
            }
            browseButton.setEnabled(dbEditable);
            if (fastaFile.exists()) {
                try {
                    // start a fresh iterator and buffer a first batch of
                    // proteins for the preview spinner
                    proteinIterator = new FastaIterator(fastaFile);
                    bufferProteins();
                    accessionsSpinner.setEnabled(true);
                    updateSequence();
                } catch (Exception e) {
                    JOptionPane.showMessageDialog(this,
                            "An error occurred while reading the FASTA file.",
                            "Import error", JOptionPane.WARNING_MESSAGE);
                    e.printStackTrace();
                }
            } else {
                accessionsSpinner.setEnabled(false);
            }
        }
    }
/**
* Updates the displayed sequence.
*/
private void updateSequence() {
String accession = accessionsSpinner.getValue().toString();
Header header = headersSample.get(accession);
Protein protein = proteinsSample.get(accession);
proteinTxt.setText(
header.getRawHeader()
+ System.getProperty("line.separator")
+ protein.getSequence()
);
proteinTxt.setCaretPosition(0);
if (ProteinUtils.isDecoy(accession, fastaParameters)) {
targetDecoyTxt.setText("(Decoy)");
} else {
targetDecoyTxt.setText("(Target)");
}
}
/**
* Returns the last selected folder.
*
* @return the last selected folder
*/
public String getLastSelectedFolder() {
if (lastSelectedFolder == null) {
return null;
}
String folder = lastSelectedFolder.getLastSelectedFolder(lastFolderKey);
if (folder == null) {
folder = lastSelectedFolder.getLastSelectedFolder();
}
return folder;
}
/**
* Allows the user to select a FASTA file, loads its information, and
* returns a boolean indicating whether the process loading was successful.
*
* @param userCanDispose if true, the dialog is closed if the user cancels
* the selection
*
* @return a boolean indicating whether a valid FASTA file was selected
*/
public boolean selectDB(boolean userCanDispose) {
File startLocation = null;
if (utilitiesUserParameters.getDbFolder() != null
&& utilitiesUserParameters.getDbFolder().exists()) {
startLocation = utilitiesUserParameters.getDbFolder();
}
if (startLocation == null) {
startLocation = new File(getLastSelectedFolder());
}
JFileChooser fc = new JFileChooser(startLocation);
FileFilter filter = new FileFilter() {
@Override
public boolean accept(File myFile) {
return myFile.getName().toLowerCase().endsWith("fasta")
|| myFile.isDirectory();
}
@Override
public String getDescription() {
return "FASTA (.fasta)";
}
};
fc.setFileFilter(filter);
int result = fc.showOpenDialog(this);
if (result == JFileChooser.APPROVE_OPTION) {
File file = fc.getSelectedFile();
File folder = file.getParentFile();
utilitiesUserParameters.setDbFolder(folder);
lastSelectedFolder.setLastSelectedFolder(lastFolderKey, folder.getAbsolutePath());
if (file.getName().contains(" ")) {
file = renameFastaFileName(file);
if (file == null) {
return false;
}
}
loadFastaFile(file.getAbsolutePath(), fastaParameters == null, true, true);
return true;
} else if (userCanDispose) {
dispose();
return false;
}
return false;
}
    /**
     * Loads the given FASTA file asynchronously: one thread shows the modal
     * progress dialog, a second ("importThread") infers the parsing
     * parameters (optionally), builds the summary, offers to append decoys
     * when none are present, and finally refreshes the GUI.
     *
     * @param fastaFile a FASTA file
     * @param inferParameters if true, FASTA parsing parameters are inferred
     * automatically
     * @param iNewFastaSummary if true, a new FASTA summary will be created even
     * if one already exists
     * @param setUpGUI if true the GUI will be updated
     */
    private void loadFastaFile(
            String fastaFile,
            boolean inferParameters,
            boolean iNewFastaSummary,
            boolean setUpGUI
    ) {
        this.selectedFastaFile = fastaFile;
        // effectively-final copy for use inside the worker thread
        final boolean newFastaSummary = iNewFastaSummary;
        progressDialog = new ProgressDialogX(this, parentFrame,
                normalImange,
                waitingImage,
                true);
        // showing a modal dialog blocks, so it runs on its own thread
        new Thread(new Runnable() {
            public void run() {
                try {
                    progressDialog.setVisible(true);
                } catch (IndexOutOfBoundsException e) {
                    // ignore
                }
            }
        }, "ProgressDialog").start();
        new Thread("importThread") {
            public void run() {
                try {
                    if (inferParameters) {
                        progressDialog.setPrimaryProgressCounterIndeterminate(true);
                        progressDialog.setTitle("Inferring Database Format. Please Wait...");
                        fastaParameters = FastaParameters.inferParameters(selectedFastaFile, progressDialog);
                    }
                    if (!progressDialog.isRunCanceled()) {
                        progressDialog.setWaitingText("Importing Database. Please Wait...");
                        fastaSummary = FastaSummary.getSummary(
                                selectedFastaFile,
                                fastaParameters,
                                newFastaSummary,
                                progressDialog
                        );
                        progressDialog.setSecondaryProgressCounterIndeterminate(true);
                        if (!fastaSummary.containsDecoys()) {
                            // offer to generate a target-decoy database
                            int outcome = JOptionPane.showConfirmDialog(
                                    SequenceDbDetailsDialog.this,
                                    "The database does not seem to contain decoy sequences.\nAdd decoys?",
                                    "Add decoys?",
                                    JOptionPane.YES_NO_OPTION
                            );
                            if (outcome == JOptionPane.YES_OPTION) {
                                generateTargetDecoyDatabase();
                            } else {
                                decoyButton.setEnabled(true);
                            }
                        }
                        if (setUpGUI && !progressDialog.isRunCanceled()) {
                            setUpGUI();
                        }
                        progressDialog.setRunFinished();
                    }
                } catch (Exception e) {
                    // close the progress dialog before reporting the failure
                    progressDialog.setRunFinished();
                    JOptionPane.showMessageDialog(
                            SequenceDbDetailsDialog.this,
                            JOptionEditorPane.getJOptionEditorPane(
                                    "There was an error importing the FASTA file:<br>"
                                    + e.getMessage() + "<br>"
                                    + "See <a href=\"https://compomics.github.io/projects/searchgui/wiki/DatabaseHelp.html\">DatabaseHelp</a> for help."
                            ),
                            "FASTA Import Error",
                            JOptionPane.WARNING_MESSAGE);
                    e.printStackTrace();
                    return;
                }
                // safety net: also reached when the run was canceled above
                progressDialog.setRunFinished();
            }
        }.start();
    }
    /**
     * Appends decoy sequences to the currently selected target database file,
     * writing the result to a new file named after the target-decoy suffix,
     * and updates selectedFastaFile, fastaParameters and fastaSummary
     * accordingly. Progress is reported through the shared progress dialog;
     * failures are reported to the user via dialogs.
     */
    private void generateTargetDecoyDatabase() {
        // set up the new fasta file name
        String newFasta = selectedFastaFile;
        File originalFastaFile = new File(selectedFastaFile);
        // remove the ending .fasta (if there)
        if (selectedFastaFile.lastIndexOf(".") != -1) {
            newFasta = selectedFastaFile.substring(0, selectedFastaFile.lastIndexOf("."));
        }
        // add the target decoy tag
        newFasta += fastaParameters.getTargetDecoyFileNameSuffix() + ".fasta";
        try {
            File newFile = new File(newFasta);
            progressDialog.setTitle("Appending Decoy Sequences. Please Wait...");
            progressDialog.setPrimaryProgressCounterIndeterminate(false);
            progressDialog.setPrimaryProgressCounter(0);
            progressDialog.setMaxPrimaryProgressCounter(100);
            DecoyConverter.appendDecoySequences(originalFastaFile, newFile, fastaParameters, progressDialog);
            progressDialog.setTitle("Getting Database Details. Please Wait...");
            progressDialog.setPrimaryProgressCounterIndeterminate(true);
            // switch the selection to the newly generated concatenated file
            selectedFastaFile = newFile.getAbsolutePath();
            fastaParameters = DecoyConverter.getDecoyParameters(fastaParameters);
            // NOTE(review): the summary is derived from originalFastaFile even
            // though selectedFastaFile now points to the new file — confirm
            // that getDecoySummary expects the original target file here.
            fastaSummary = DecoyConverter.getDecoySummary(originalFastaFile, fastaSummary);
        } catch (OutOfMemoryError error) {
            Runtime.getRuntime().gc();
            JOptionPane.showMessageDialog(
                    SequenceDbDetailsDialog.this,
                    "The tool used up all the available memory and had to be stopped.\n"
                    + "Memory boundaries are set in the Edit menu (Edit > Java Options).",
                    "Out Of Memory Error",
                    JOptionPane.ERROR_MESSAGE
            );
            System.out.println("Ran out of memory!");
            error.printStackTrace();
        } catch (FileNotFoundException e) {
            JOptionPane.showMessageDialog(
                    SequenceDbDetailsDialog.this,
                    new String[]{"FASTA Import Error.", "File " + selectedFastaFile + " not found."},
                    "FASTA Import Error", JOptionPane.WARNING_MESSAGE
            );
            e.printStackTrace();
        } catch (Exception e) {
            JOptionPane.showMessageDialog(
                    SequenceDbDetailsDialog.this,
                    new String[]{"FASTA Import Error.",
                        "File " + selectedFastaFile + " could not be imported."},
                    "FASTA Import Error",
                    JOptionPane.WARNING_MESSAGE
            );
            e.printStackTrace();
        }
    }
/**
* Copies the content of the FASTA file to a new file and replaces any white
* space in the file name with '_' instead. Returns the new file, null if an
* error occurred.
*
* @param file the FASTA file to rename
* @return the renamed FASTA file
*/
public File renameFastaFileName(File file) {
String tempName = file.getName();
tempName = tempName.replaceAll(" ", "_");
File renamedFile = new File(file.getParentFile().getAbsolutePath() + File.separator + tempName);
boolean success = false;
try {
success = renamedFile.createNewFile();
if (success) {
IoUtil.copyFile(file, renamedFile);
}
} catch (IOException e) {
JOptionPane.showMessageDialog(
this,
"An error occurred while renaming the file.",
"Please Rename File",
JOptionPane.WARNING_MESSAGE
);
e.printStackTrace();
success = false;
}
if (success) {
JOptionPane.showMessageDialog(
this,
"Your FASTA file name contained white space and has been renamed to:\n"
+ file.getParentFile().getAbsolutePath() + File.separator + tempName,
"Renamed File",
JOptionPane.WARNING_MESSAGE
);
return renamedFile;
}
return null;
}
/**
* This method is called from within the constructor to initialize the form.
* WARNING: Do NOT modify this code. The content of this method is always
* regenerated by the Form Editor.
*/
@SuppressWarnings("unchecked")
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
backgroundPanel = new javax.swing.JPanel();
cancelButton = new javax.swing.JButton();
okButton = new javax.swing.JButton();
databaseInformationPanel = new javax.swing.JPanel();
nameLabel = new javax.swing.JLabel();
dbNameTxt = new javax.swing.JTextField();
typeLabel = new javax.swing.JLabel();
fileTxt = new javax.swing.JTextField();
versionLabel = new javax.swing.JLabel();
versionTxt = new javax.swing.JTextField();
lastModifiedLabel = new javax.swing.JLabel();
lastModifiedTxt = new javax.swing.JTextField();
sizeLabel = new javax.swing.JLabel();
sizeTxt = new javax.swing.JTextField();
decoyButton = new javax.swing.JButton();
browseButton = new javax.swing.JButton();
fileLabel = new javax.swing.JLabel();
typeJTextField = new javax.swing.JTextField();
speciesJTextField = new javax.swing.JTextField();
speciesLabel = new javax.swing.JLabel();
previewPanel = new javax.swing.JPanel();
proteinYxtScrollPane = new javax.swing.JScrollPane();
proteinTxt = new javax.swing.JTextArea();
proteinLabel = new javax.swing.JLabel();
accessionsSpinner = new javax.swing.JSpinner();
targetDecoyTxt = new javax.swing.JLabel();
editFastaParametersJLabel = new javax.swing.JLabel();
databaseHelpSettingsJLabel = new javax.swing.JLabel();
setDefaultCloseOperation(javax.swing.WindowConstants.DISPOSE_ON_CLOSE);
setTitle("Database Overview");
setMinimumSize(new java.awt.Dimension(500, 500));
addWindowListener(new java.awt.event.WindowAdapter() {
public void windowClosing(java.awt.event.WindowEvent evt) {
formWindowClosing(evt);
}
});
backgroundPanel.setBackground(new java.awt.Color(230, 230, 230));
cancelButton.setText("Cancel");
cancelButton.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
cancelButtonActionPerformed(evt);
}
});
okButton.setText("OK");
okButton.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
okButtonActionPerformed(evt);
}
});
databaseInformationPanel.setBorder(javax.swing.BorderFactory.createTitledBorder("Database Details"));
databaseInformationPanel.setOpaque(false);
nameLabel.setText("Name");
dbNameTxt.setEditable(false);
dbNameTxt.setHorizontalAlignment(javax.swing.JTextField.CENTER);
typeLabel.setText("Type(s)");
fileTxt.setEditable(false);
fileTxt.setHorizontalAlignment(javax.swing.JTextField.LEFT);
versionLabel.setText("Version");
versionTxt.setEditable(false);
versionTxt.setHorizontalAlignment(javax.swing.JTextField.CENTER);
lastModifiedLabel.setText("Modified");
lastModifiedTxt.setEditable(false);
lastModifiedTxt.setHorizontalAlignment(javax.swing.JTextField.CENTER);
sizeLabel.setText("Size");
sizeTxt.setEditable(false);
sizeTxt.setHorizontalAlignment(javax.swing.JTextField.CENTER);
decoyButton.setText("Decoys");
decoyButton.setPreferredSize(new java.awt.Dimension(75, 25));
decoyButton.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
decoyButtonActionPerformed(evt);
}
});
browseButton.setText("Browse");
browseButton.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
browseButtonActionPerformed(evt);
}
});
fileLabel.setText("File");
typeJTextField.setEditable(false);
typeJTextField.setHorizontalAlignment(javax.swing.JTextField.CENTER);
speciesJTextField.setEditable(false);
speciesJTextField.setHorizontalAlignment(javax.swing.JTextField.CENTER);
speciesLabel.setText("Species");
javax.swing.GroupLayout databaseInformationPanelLayout = new javax.swing.GroupLayout(databaseInformationPanel);
databaseInformationPanel.setLayout(databaseInformationPanelLayout);
databaseInformationPanelLayout.setHorizontalGroup(
databaseInformationPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(databaseInformationPanelLayout.createSequentialGroup()
.addContainerGap()
.addGroup(databaseInformationPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(javax.swing.GroupLayout.Alignment.TRAILING, databaseInformationPanelLayout.createSequentialGroup()
.addComponent(fileLabel, javax.swing.GroupLayout.PREFERRED_SIZE, 75, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(fileTxt)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(browseButton, javax.swing.GroupLayout.PREFERRED_SIZE, 75, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(decoyButton, javax.swing.GroupLayout.PREFERRED_SIZE, 75, javax.swing.GroupLayout.PREFERRED_SIZE))
.addGroup(databaseInformationPanelLayout.createSequentialGroup()
.addComponent(sizeLabel, javax.swing.GroupLayout.PREFERRED_SIZE, 75, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(sizeTxt))
.addGroup(databaseInformationPanelLayout.createSequentialGroup()
.addComponent(typeLabel, javax.swing.GroupLayout.PREFERRED_SIZE, 75, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(typeJTextField))
.addGroup(databaseInformationPanelLayout.createSequentialGroup()
.addComponent(versionLabel, javax.swing.GroupLayout.PREFERRED_SIZE, 75, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(versionTxt))
.addGroup(databaseInformationPanelLayout.createSequentialGroup()
.addComponent(nameLabel, javax.swing.GroupLayout.PREFERRED_SIZE, 75, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(dbNameTxt))
.addGroup(databaseInformationPanelLayout.createSequentialGroup()
.addComponent(lastModifiedLabel, javax.swing.GroupLayout.PREFERRED_SIZE, 75, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(lastModifiedTxt))
.addGroup(databaseInformationPanelLayout.createSequentialGroup()
.addComponent(speciesLabel, javax.swing.GroupLayout.PREFERRED_SIZE, 75, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(speciesJTextField)))
.addContainerGap())
);
databaseInformationPanelLayout.linkSize(javax.swing.SwingConstants.HORIZONTAL, new java.awt.Component[] {fileLabel, lastModifiedLabel, nameLabel, sizeLabel, typeLabel, versionLabel});
databaseInformationPanelLayout.linkSize(javax.swing.SwingConstants.HORIZONTAL, new java.awt.Component[] {browseButton, decoyButton});
databaseInformationPanelLayout.setVerticalGroup(
databaseInformationPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(databaseInformationPanelLayout.createSequentialGroup()
.addContainerGap()
.addGroup(databaseInformationPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(fileTxt, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(decoyButton, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(browseButton)
.addComponent(fileLabel))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(databaseInformationPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(nameLabel)
.addComponent(dbNameTxt, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(databaseInformationPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(speciesLabel)
.addComponent(speciesJTextField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(databaseInformationPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(typeLabel)
.addComponent(typeJTextField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(databaseInformationPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(versionTxt, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(versionLabel))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(databaseInformationPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(sizeLabel)
.addComponent(sizeTxt, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(databaseInformationPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(lastModifiedLabel)
.addComponent(lastModifiedTxt, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
.addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
);
databaseInformationPanelLayout.linkSize(javax.swing.SwingConstants.VERTICAL, new java.awt.Component[] {browseButton, decoyButton});
previewPanel.setBorder(javax.swing.BorderFactory.createTitledBorder("Preview"));
previewPanel.setOpaque(false);
proteinTxt.setEditable(false);
proteinTxt.setColumns(20);
proteinTxt.setLineWrap(true);
proteinTxt.setRows(5);
proteinTxt.setWrapStyleWord(true);
proteinYxtScrollPane.setViewportView(proteinTxt);
proteinLabel.setText("Protein");
accessionsSpinner.addChangeListener(new javax.swing.event.ChangeListener() {
public void stateChanged(javax.swing.event.ChangeEvent evt) {
accessionsSpinnerStateChanged(evt);
}
});
targetDecoyTxt.setText("(target/decoy)");
editFastaParametersJLabel.setForeground(new java.awt.Color(0, 0, 255));
editFastaParametersJLabel.setHorizontalAlignment(javax.swing.SwingConstants.CENTER);
editFastaParametersJLabel.setText("<html><a style=\"text-decoration: none\">Edit FASTA parsing options</a></html>");
editFastaParametersJLabel.setToolTipText("Edit the FASTA parsing options");
editFastaParametersJLabel.addMouseListener(new java.awt.event.MouseAdapter() {
public void mouseClicked(java.awt.event.MouseEvent evt) {
editFastaParametersJLabelMouseClicked(evt);
}
public void mouseEntered(java.awt.event.MouseEvent evt) {
editFastaParametersJLabelMouseEntered(evt);
}
public void mouseExited(java.awt.event.MouseEvent evt) {
editFastaParametersJLabelMouseExited(evt);
}
});
javax.swing.GroupLayout previewPanelLayout = new javax.swing.GroupLayout(previewPanel);
previewPanel.setLayout(previewPanelLayout);
previewPanelLayout.setHorizontalGroup(
previewPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(previewPanelLayout.createSequentialGroup()
.addGroup(previewPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(previewPanelLayout.createSequentialGroup()
.addGap(16, 16, 16)
.addComponent(proteinLabel)
.addGap(18, 18, 18)
.addComponent(accessionsSpinner, javax.swing.GroupLayout.PREFERRED_SIZE, 222, javax.swing.GroupLayout.PREFERRED_SIZE)
.addGap(18, 18, 18)
.addComponent(targetDecoyTxt)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, 169, Short.MAX_VALUE)
.addComponent(editFastaParametersJLabel, javax.swing.GroupLayout.PREFERRED_SIZE, 155, javax.swing.GroupLayout.PREFERRED_SIZE))
.addComponent(proteinYxtScrollPane))
.addContainerGap())
);
previewPanelLayout.setVerticalGroup(
previewPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(previewPanelLayout.createSequentialGroup()
.addContainerGap()
.addComponent(proteinYxtScrollPane, javax.swing.GroupLayout.DEFAULT_SIZE, 160, Short.MAX_VALUE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addGroup(previewPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(proteinLabel)
.addComponent(accessionsSpinner, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(targetDecoyTxt)
.addComponent(editFastaParametersJLabel, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
.addContainerGap())
);
databaseHelpSettingsJLabel.setForeground(new java.awt.Color(0, 0, 255));
databaseHelpSettingsJLabel.setHorizontalAlignment(javax.swing.SwingConstants.CENTER);
databaseHelpSettingsJLabel.setText("<html><a style=\"text-decoration: none\">Database help?</a></html>");
databaseHelpSettingsJLabel.setToolTipText("Open Database Help");
databaseHelpSettingsJLabel.addMouseListener(new java.awt.event.MouseAdapter() {
public void mouseClicked(java.awt.event.MouseEvent evt) {
databaseHelpSettingsJLabelMouseClicked(evt);
}
public void mouseEntered(java.awt.event.MouseEvent evt) {
databaseHelpSettingsJLabelMouseEntered(evt);
}
public void mouseExited(java.awt.event.MouseEvent evt) {
databaseHelpSettingsJLabelMouseExited(evt);
}
});
javax.swing.GroupLayout backgroundPanelLayout = new javax.swing.GroupLayout(backgroundPanel);
backgroundPanel.setLayout(backgroundPanelLayout);
backgroundPanelLayout.setHorizontalGroup(
backgroundPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(backgroundPanelLayout.createSequentialGroup()
.addContainerGap()
.addGroup(backgroundPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(backgroundPanelLayout.createSequentialGroup()
.addGap(10, 10, 10)
.addComponent(databaseHelpSettingsJLabel, javax.swing.GroupLayout.PREFERRED_SIZE, 100, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(okButton, javax.swing.GroupLayout.PREFERRED_SIZE, 75, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(cancelButton, javax.swing.GroupLayout.PREFERRED_SIZE, 75, javax.swing.GroupLayout.PREFERRED_SIZE))
.addComponent(previewPanel, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(databaseInformationPanel, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
.addContainerGap())
);
backgroundPanelLayout.linkSize(javax.swing.SwingConstants.HORIZONTAL, new java.awt.Component[] {cancelButton, okButton});
backgroundPanelLayout.setVerticalGroup(
backgroundPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(backgroundPanelLayout.createSequentialGroup()
.addContainerGap()
.addComponent(databaseInformationPanel, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(previewPanel, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(backgroundPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(cancelButton)
.addComponent(okButton)
.addComponent(databaseHelpSettingsJLabel, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
.addContainerGap())
);
javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
getContentPane().setLayout(layout);
layout.setHorizontalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(backgroundPanel, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
);
layout.setVerticalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(backgroundPanel, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
);
pack();
}// </editor-fold>//GEN-END:initComponents
    /**
     * Saves changes and closes the dialog.
     *
     * @param evt the action event
     */
    private void okButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_okButtonActionPerformed
        // persist the (possibly updated) utilities user parameters before closing
        UtilitiesUserParameters.saveUserParameters(utilitiesUserParameters);
        dispose();
    }//GEN-LAST:event_okButtonActionPerformed
    /**
     * Close the dialog without saving.
     *
     * @param evt the action event
     */
    private void cancelButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_cancelButtonActionPerformed
        // flag the abort so isCanceled() reports it to the caller
        canceled = true;
        dispose();
    }//GEN-LAST:event_cancelButtonActionPerformed
    /**
     * Open a file chooser to select a FASTA file.
     *
     * @param evt the action event
     */
    private void browseButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_browseButtonActionPerformed
        // NOTE(review): the boolean flag's semantics are defined by selectDB
        // (not visible in this chunk) — confirm against its declaration
        selectDB(false);
    }//GEN-LAST:event_browseButtonActionPerformed
    /**
     * Add decoys to the current database.
     *
     * @param evt the action event
     */
    private void decoyButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_decoyButtonActionPerformed
        // modal progress dialog shown while the decoy database is generated
        progressDialog = new ProgressDialogX(this, parentFrame,
                normalImange,
                waitingImage,
                true);
        progressDialog.setPrimaryProgressCounterIndeterminate(true);
        progressDialog.setTitle("Creating Decoy. Please Wait...");

        // setVisible(true) blocks on a modal dialog, so it runs on its own thread
        new Thread(new Runnable() {
            public void run() {
                try {
                    progressDialog.setVisible(true);
                } catch (IndexOutOfBoundsException e) {
                    // ignore: can occur when the dialog is disposed before it is shown
                }
            }
        }, "ProgressDialog").start();

        // background worker: generate the target/decoy database, then refresh
        // the GUI unless the user canceled via the progress dialog
        new Thread("DecoyThread") {
            public void run() {
                generateTargetDecoyDatabase();
                if (!progressDialog.isRunCanceled()) {
                    setUpGUI();
                }
                progressDialog.setRunFinished();
            }
        }.start();
    }//GEN-LAST:event_decoyButtonActionPerformed
    /**
     * Update the displayed sequence when a new accession is selected in the spinner.
     *
     * @param evt the change event
     */
    private void accessionsSpinnerStateChanged(javax.swing.event.ChangeEvent evt) {//GEN-FIRST:event_accessionsSpinnerStateChanged
        updateSequence();
    }//GEN-LAST:event_accessionsSpinnerStateChanged
    /**
     * Open the database help page in the default browser.
     *
     * @param evt the mouse event
     */
    private void databaseHelpSettingsJLabelMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_databaseHelpSettingsJLabelMouseClicked
        // show a wait cursor while the browser is being launched
        this.setCursor(new java.awt.Cursor(java.awt.Cursor.WAIT_CURSOR));
        BareBonesBrowserLaunch.openURL("https://compomics.github.io/projects/searchgui/wiki/DatabaseHelp.html");
        this.setCursor(new java.awt.Cursor(java.awt.Cursor.DEFAULT_CURSOR));
    }//GEN-LAST:event_databaseHelpSettingsJLabelMouseClicked
    /**
     * Change the cursor to a hand cursor, indicating a clickable link.
     *
     * @param evt the mouse event
     */
    private void databaseHelpSettingsJLabelMouseEntered(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_databaseHelpSettingsJLabelMouseEntered
        this.setCursor(new java.awt.Cursor(java.awt.Cursor.HAND_CURSOR));
    }//GEN-LAST:event_databaseHelpSettingsJLabelMouseEntered
    /**
     * Change the cursor back to the default cursor when leaving the link.
     *
     * @param evt the mouse event
     */
    private void databaseHelpSettingsJLabelMouseExited(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_databaseHelpSettingsJLabelMouseExited
        this.setCursor(new java.awt.Cursor(java.awt.Cursor.DEFAULT_CURSOR));
    }//GEN-LAST:event_databaseHelpSettingsJLabelMouseExited
    /**
     * Close the dialog, treating a window close as a cancel.
     *
     * @param evt the action event
     */
    private void formWindowClosing(java.awt.event.WindowEvent evt) {//GEN-FIRST:event_formWindowClosing
        // closing the window is equivalent to pressing the Cancel button
        cancelButtonActionPerformed(null);
    }//GEN-LAST:event_formWindowClosing
    /**
     * Open the FASTA parameters dialog.
     *
     * @param evt the mouse event
     */
    private void editFastaParametersJLabelMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_editFastaParametersJLabelMouseClicked
        FastaParametersDialog fastaParametersDialog
                = new FastaParametersDialog(this, parentFrame, fastaParameters, dbEditable);
        if (!fastaParametersDialog.isCanceled()) {
            FastaParameters newFastaParameters = fastaParametersDialog.getFastaSettings();
            // reload the FASTA file only if the parsing parameters actually changed
            if (!newFastaParameters.equals(fastaParameters)) {
                fastaParameters = newFastaParameters;
                loadFastaFile(selectedFastaFile, false, true, true);
            }
        }
    }//GEN-LAST:event_editFastaParametersJLabelMouseClicked
    /**
     * Change the cursor to a hand cursor, indicating a clickable link.
     *
     * @param evt the mouse event
     */
    private void editFastaParametersJLabelMouseEntered(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_editFastaParametersJLabelMouseEntered
        this.setCursor(new java.awt.Cursor(java.awt.Cursor.HAND_CURSOR));
    }//GEN-LAST:event_editFastaParametersJLabelMouseEntered
    /**
     * Change the cursor back to the default cursor when leaving the link.
     *
     * @param evt the mouse event
     */
    private void editFastaParametersJLabelMouseExited(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_editFastaParametersJLabelMouseExited
        this.setCursor(new java.awt.Cursor(java.awt.Cursor.DEFAULT_CURSOR));
    }//GEN-LAST:event_editFastaParametersJLabelMouseExited
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JSpinner accessionsSpinner;
private javax.swing.JPanel backgroundPanel;
private javax.swing.JButton browseButton;
private javax.swing.JButton cancelButton;
private javax.swing.JLabel databaseHelpSettingsJLabel;
private javax.swing.JPanel databaseInformationPanel;
private javax.swing.JTextField dbNameTxt;
private javax.swing.JButton decoyButton;
private javax.swing.JLabel editFastaParametersJLabel;
private javax.swing.JLabel fileLabel;
private javax.swing.JTextField fileTxt;
private javax.swing.JLabel lastModifiedLabel;
private javax.swing.JTextField lastModifiedTxt;
private javax.swing.JLabel nameLabel;
private javax.swing.JButton okButton;
private javax.swing.JPanel previewPanel;
private javax.swing.JLabel proteinLabel;
private javax.swing.JTextArea proteinTxt;
private javax.swing.JScrollPane proteinYxtScrollPane;
private javax.swing.JLabel sizeLabel;
private javax.swing.JTextField sizeTxt;
private javax.swing.JTextField speciesJTextField;
private javax.swing.JLabel speciesLabel;
private javax.swing.JLabel targetDecoyTxt;
private javax.swing.JTextField typeJTextField;
private javax.swing.JLabel typeLabel;
private javax.swing.JLabel versionLabel;
private javax.swing.JTextField versionTxt;
// End of variables declaration//GEN-END:variables
/**
* Buffers proteins sampled from the database.
*
* @throws IOException exception thrown if an error occurred while reading
* the FASTA file
*/
private void bufferProteins() throws IOException {
// clear the old buffer
accessionsSample.clear();
proteinsSample.clear();
headersSample.clear();
proteinsSample.clear();
int proteinCounter = 0;
Protein protein;
while (proteinCounter++ < SAMPLE_BATCH_SIZE
&& (protein = proteinIterator.getNextProtein()) != null) {
String accession = protein.getAccession();
accessionsSample.add(accession);
proteinsSample.put(accession, protein);
headersSample.put(accession, proteinIterator.getLastHeader());
}
accessionsSpinner.setModel(new SpinnerListModel(accessionsSample));
accessionsSpinner.setValue(accessionsSample.get(0));
}
    /**
     * Returns the selected FASTA file.
     *
     * @return the selected FASTA file — presumably null when no file has been
     * selected yet; TODO confirm with the callers
     */
    public String getSelectedFastaFile() {
        return selectedFastaFile;
    }
    /**
     * Returns the FASTA parsing parameters currently in use by this dialog.
     *
     * @return the FASTA parameters
     */
    public FastaParameters getFastaParameters() {
        return fastaParameters;
    }
    /**
     * Returns a boolean indicating whether the database selection was canceled
     * by the user (via the Cancel button or by closing the window).
     *
     * @return a boolean indicating whether the database selection was canceled
     * by the user
     */
    public boolean isCanceled() {
        return canceled;
    }
}
| |
package tpl.nutz.web;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.Charset;
import java.util.HashMap;
import java.util.Map;
import java.util.regex.Pattern;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.velocity.app.VelocityEngine;
import org.apache.velocity.context.Context;
import org.apache.velocity.exception.ResourceNotFoundException;
import org.nutz.json.Json;
import org.nutz.lang.Streams;
import org.nutz.mvc.Mvcs;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import tpl.groovy.GroovyConfig;
import tpl.javasrc.RuntimeClassFinder;
import tpl.nutz.TplJsonIocProvider;
import tpl.velocity.VelocityConfig;
/**
 * Servlet that maps request paths to runtime-compiled classes and/or
 * Velocity templates. A path such as {@code /admin/users} is first tried as
 * the class {@code admin.users}; when no runnable class exists, the template
 * {@code /admin/users.html} is rendered instead.
 */
public class RuntimeJavaServlet
        extends HttpServlet {

    /** Return-value prefix with which an {@code HtmlRunnable} requests an internal forward. */
    private static final String PREFIX_FORWARD_TO = "fwd:";

    private static final long serialVersionUID = -5820311121440447893L;

    private static final Logger logger = LoggerFactory.getLogger(RuntimeJavaServlet.class);

    // Servlet init-param names, and the IoC bean keys used when a param is absent.
    public static final String KEY_RUNTIME_FINDER_BEAN = "runtimeClassFinderBean";
    public static final String DEF_RUNTIME_FINDER_KEY = "runtimeClassFinder";
    public static final String KEY_GROOVY_CONFIG_BEAN = "groovyConfigBean";
    public static final String DEF_GROOVY_CONFIG_KEY = "groovyConfig";
    public static final String KEY_VELOCITY_CONFIG_BEAN = "velocityConfigBean";
    public static final String DEF_VELOCITY_CONFIG_KEY = "velocityConfig";
    public static final String KEY_ENCODING = "encoding";

    /**
     * Accepts paths built from "/"-separated Java identifiers, e.g.
     * {@code /admin/user_list}. Fix: the original character class was
     * {@code [a-z,A-Z,_]} — its literal commas unintentionally allowed ','
     * as the first character of a path segment.
     */
    private static final Pattern REGEX_VALID_PATH = Pattern.compile("^([/][a-zA-Z_][\\w]*)+$");

    private RuntimeClassFinder finder;

    private GroovyConfig groovyConfig;

    private VelocityConfig velocityConfig;

    /** Request/response character encoding; overridable via the "encoding" init-param. */
    private String encoding = "UTF-8";

    /**
     * Entry point for all HTTP methods: resolves the request path and renders
     * it. Directory-style paths (trailing "/") fall through to the default
     * {@code HttpServlet} behavior.
     */
    @Override
    protected void service(HttpServletRequest req, HttpServletResponse resp)
            throws ServletException, IOException {
        String path = Mvcs.getRequestPath(req);
        if (path.endsWith("/")) {
            super.service(req, resp);
            return;
        }
        req.setCharacterEncoding(encoding);
        resp.setCharacterEncoding(encoding);
        try {
            render(req, resp, path);
        } catch (ResourceNotFoundException e) {
            // neither a class nor a template exists for this path;
            // NOTE(review): the response is left untouched (no 404 is sent) — confirm intent
            logger.info("No resource for path [{}]", path, e);
        } catch (Exception e) {
            logger.warn("Rendering failed for path [{}]", path, e);
        }
    }

    /**
     * Renders the given path: runs its runtime class when one exists,
     * otherwise renders the Velocity template {@code <path>.html}.
     */
    private void render(HttpServletRequest req, HttpServletResponse resp, String path)
            throws IOException, ServletException {
        String fqcn = toClassName(path);
        logger.trace("Class name for path [{}]: {}", path, fqcn);
        if (fqcn == null) {
            renderTemplate(path + ".html", req, resp, null);
            return;
        }
        try {
            runClass(fqcn, path, req, resp);
        } catch (ClassNotFoundException | InstantiationException | IllegalAccessException e) {
            // no instantiable class for this path: fall back to the plain template
            logger.trace("", e);
            renderTemplate(path + ".html", req, resp, null);
        }
    }

    /**
     * Instantiates the class and dispatches on the runnable interface it
     * implements; classes implementing none fall back to template rendering.
     */
    private void runClass(String fqcn, String path,
                          HttpServletRequest req, HttpServletResponse resp)
            throws ClassNotFoundException, IOException, InstantiationException, IllegalAccessException, ServletException {
        Class<?> clazz = finder.findClass(fqcn);
        if (HtmlRunnable.class.isAssignableFrom(clazz)) {
            runHtml((HtmlRunnable) clazz.newInstance(), path, req, resp);
        } else if (JsonRunnable.class.isAssignableFrom(clazz)) {
            runJson((JsonRunnable) clazz.newInstance(), path, req, resp);
        } else if (RawRunnable.class.isAssignableFrom(clazz)) {
            runRaw((RawRunnable) clazz.newInstance(), path, req, resp);
        } else {
            logger.debug("Class [{}] is not runnable.", clazz);
            renderTemplate(path + ".html", req, resp, null);
        }
    }

    /**
     * Runs an HTML handler. Its return value selects what happens next:
     * null/empty renders {@code <path>.html}, {@code "fwd:<other>"} forwards
     * internally, {@code "rendered"} means the handler wrote the response
     * itself, and any other value renders {@code <value>.html}.
     */
    private void runHtml(HtmlRunnable inst, String path,
                         HttpServletRequest req, HttpServletResponse resp) throws IOException {
        Map<String, Object> ctx = createContext();
        try {
            String r = inst.run(path, ctx, req, resp);
            if (r == null || r.isEmpty()) {
                renderTemplate(path + ".html", req, resp, ctx);
            } else if (r.startsWith(PREFIX_FORWARD_TO)) {
                render(req, resp, r.substring(PREFIX_FORWARD_TO.length()));
            } else if (r.equals("rendered")) {
                // Does nothing: the handler already wrote the response.
            } else {
                renderTemplate(r + ".html", req, resp, ctx);
            }
        } catch (Exception e) {
            renderErrorPage(path, ctx, req, resp, e);
        }
    }

    /** Renders the 500 error template, exposing the exception to the template context. */
    private void renderErrorPage(String path, Map<String, Object> ctx,
                                 HttpServletRequest req, HttpServletResponse resp, Exception e) throws IOException {
        Map<String, Object> result = ctx == null ? new HashMap<String, Object>() : ctx;
        result.put("exception", e);
        renderTemplate("500.html", req, resp, result);
    }

    /** Runs a JSON handler and writes its result (or the raised exception) as JSON. */
    private void runJson(JsonRunnable inst, String path,
                         HttpServletRequest req, HttpServletResponse resp) throws IOException {
        Map<String, Object> ctx = createContext();
        try {
            resp.getWriter().print(Json.toJson(inst.run(path, ctx, req, resp)));
        } catch (Exception e) {
            // TODO Output less information.
            resp.getWriter().print(Json.toJson(e));
        }
    }

    /** Runs a raw handler, which is fully responsible for the response. */
    private void runRaw(RawRunnable inst, String path,
                        HttpServletRequest req, HttpServletResponse resp)
            throws IOException, ServletException {
        inst.run(path, createContext(), req, resp);
    }

    /** Creates the per-request context map passed to every handler. */
    private Map<String, Object> createContext() {
        // TODO Configure context in a nutz IoC container.
        return new HashMap<String, Object>();
    }

    /**
     * Converts a request path to a fully qualified class name, or returns
     * null when the path is not a valid sequence of Java identifiers.
     */
    private String toClassName(String path) {
        if (path == null) {
            return null;
        }
        String p = path;
        if (p.startsWith("/")) {
            p = p.substring(1);
        }
        if (p.endsWith("/")) {
            p = p.substring(0, p.length() - 1);
        }
        if (!REGEX_VALID_PATH.matcher("/" + p).matches()) {
            return null;
        }
        // '/' is a plain character, so plain replace() is used instead of the
        // regex-based replaceAll()
        return p.replace('/', '.');
    }

    /** Merges the named Velocity template into the response. */
    private void renderTemplate(String path, HttpServletRequest req,
                                HttpServletResponse resp, Map<String, Object> result)
            throws IOException {
        VelocityEngine ve = velocityConfig.getEngine();
        Context ctx = getContext(path, req, resp, result);
        Writer w = getWriter(resp);
        try {
            ctx.put(GroovyConfig.KEY_GROOVY_CONFIG, groovyConfig);
            ve.mergeTemplate(path, velocityConfig.getEncoding(), ctx, w);
        } finally {
            Streams.safeClose(w);
        }
    }

    /**
     * Returns a writer for the response. Fix: {@code getWriter()} signals
     * "output stream already in use" with {@code IllegalStateException}, not
     * {@code IOException}, so both are caught; the fallback writer now uses
     * the response's character encoding instead of the platform default.
     */
    private Writer getWriter(HttpServletResponse resp) throws IOException {
        try {
            return resp.getWriter();
        } catch (IOException | IllegalStateException e) {
            return new OutputStreamWriter(resp.getOutputStream(), resp.getCharacterEncoding());
        }
    }

    /** Builds the Velocity context, exposing request/response and path metadata. */
    private Context getContext(String path, HttpServletRequest req,
                               HttpServletResponse resp, Map<String, Object> result) {
        Context ctx = velocityConfig.newContext(result);
        ctx.put(VelocityConfig.KEY_PATH, path);
        ctx.put(VelocityConfig.KEY_REQUEST, req);
        ctx.put(VelocityConfig.KEY_RESPONSE, resp);
        ctx.put(VelocityConfig.KEY_CONTEXT_PATH, req.getContextPath());
        ctx.put(VelocityConfig.KEY_REQUEST_URI, Mvcs.getRequestPath(req));
        return ctx;
    }

    /**
     * Resolves collaborators from the nutz IoC container, honoring the
     * init-params above; falls back to a fresh RuntimeClassFinder and keeps
     * UTF-8 when no supported encoding is configured.
     */
    @Override
    public void init(ServletConfig config) throws ServletException {
        super.init(config);
        String n = config.getInitParameter(KEY_RUNTIME_FINDER_BEAN);
        if (n == null || n.isEmpty()) n = DEF_RUNTIME_FINDER_KEY;
        finder = TplJsonIocProvider.nutzIoc().get(null, n);
        if (finder == null) {
            finder = new RuntimeClassFinder();
            finder.init();
        }
        n = config.getInitParameter(KEY_GROOVY_CONFIG_BEAN);
        if (n == null || n.isEmpty()) n = DEF_GROOVY_CONFIG_KEY;
        groovyConfig = TplJsonIocProvider.nutzIoc().get(null, n);
        n = config.getInitParameter(KEY_VELOCITY_CONFIG_BEAN);
        if (n == null || n.isEmpty()) n = DEF_VELOCITY_CONFIG_KEY;
        velocityConfig = TplJsonIocProvider.nutzIoc().get(null, n);
        String enc = config.getInitParameter(KEY_ENCODING);
        if (enc != null && !enc.isEmpty() && Charset.isSupported(enc)) {
            encoding = enc;
        }
    }
}
| |
/**
* Copyright (C) 2015 Orange
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.francetelecom.clara.cloud.core.service;
import com.francetelecom.clara.cloud.commons.AuthorizationException;
import com.francetelecom.clara.cloud.commons.TechnicalException;
import com.francetelecom.clara.cloud.commons.ValidatorUtil;
import com.francetelecom.clara.cloud.core.service.exception.*;
import com.francetelecom.clara.cloud.coremodel.*;
import com.francetelecom.clara.cloud.services.dto.ApplicationDTO;
import com.francetelecom.clara.cloud.services.dto.ConfigOverrideDTO;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Sort;
import org.springframework.transaction.annotation.Isolation;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import static com.francetelecom.clara.cloud.coremodel.ApplicationSpecifications.*;
import static org.springframework.data.jpa.domain.Specifications.where;
/**
* Business implementation for Application management.
*
* All methods are defined as transactional. If no transaction is in progress
* during method call, then it will start a new transaction.
*/
public class ManageApplicationImpl implements ManageApplication {
private static final Logger log = LoggerFactory.getLogger(ManageApplicationImpl.class);
@Autowired
private SecurityUtils securityUtils;
@Autowired(required = true)
private ApplicationRepository applicationRepository;
@Autowired(required = true)
private ApplicationReleaseRepository applicationReleaseRepository;
@Autowired(required = true)
private PaasUserRepository paasUserRepository;
@Autowired(required = true)
private ConfigRoleRepository configRoleRepository;
@Override
public List<Application> findApplications() {
List<Application> applications = applicationRepository.findAll(isActive(), new Sort(Sort.Direction.ASC,"label"));
for (Application application : applications) {
application.setEditable(hasWritePermissionFor(application));
}
return applications;
}
@Override
public List<Application> findAccessibleApplications() {
if (securityUtils.currentUserIsAdmin()) {
return findApplications();
} else {
List<Application> applications = applicationRepository.findAll(where(isActive()).and(isPublicOrHasForMember(securityUtils.currentUser())));
for (Application application : applications) {
application.setEditable(hasWritePermissionFor(application));
}
return applications;
}
}
@Override
public List<Application> findMyApplications() {
return applicationRepository.findAll(where(isActive()).and(hasForMember(securityUtils.currentUser())), new Sort(Sort.Direction.ASC, "label"));
}
@Override
public long countApplications() {
return applicationRepository.count(isActive());
}
@Override
public ApplicationDTO findApplicationByLabel(String label) throws ApplicationNotFoundException {
Application application = applicationRepository.findOne(hasLabel(label));
if (application == null) {
String message = "Application with label[" + label + "] does not exist.";
log.info(message);
throw new ApplicationNotFoundException(message);
}
return new ApplicationDTO(application.getUID(), application.getCode(), application.getLabel(), application.getDescription(),
application.getApplicationRegistryUrl());
}
@Override
public boolean isApplicationLabelUnique(String label) {
    // The label is free when no active application already uses it.
    final Application existing =
            applicationRepository.findOne(where(isActive()).and(hasLabel(label)));
    return existing == null;
}
private boolean isApplicationCodeUnique(String code) {
    // The code is free when no active application already uses it.
    final Application existing =
            applicationRepository.findOne(where(isActive()).and(hasCode(code)));
    return existing == null;
}
private boolean applicationHasNoActiveApplicationReleases(String applicationUID) {
    // True when the application owns no active release at all.
    final long releaseCount =
            applicationReleaseRepository.countApplicationReleasesByApplicationUID(applicationUID);
    return releaseCount == 0;
}
@Override
@Transactional(propagation = Propagation.REQUIRED, isolation = Isolation.DEFAULT, rollbackForClassName = { "BusinessException" })
public String createPublicApplication(String code, String label, String description, URL applicationRegistryUrl, SSOId... members)
        throws DuplicateApplicationException, PaasUserNotFoundException {
    // Build and validate the new application, flag it as public, persist it
    // and hand back its generated UID.
    final Application created = createApplication(code, label, description, applicationRegistryUrl, members);
    created.setAsPublic();
    applicationRepository.save(created);
    return created.getUID();
}
/**
 * Builds a new (not yet persisted) Application, verifies its members exist,
 * that the current user may create it, that bean constraints hold, and that
 * label and code are unique among active applications.
 *
 * @throws PaasUserNotFoundException     when a member SSO id is unknown
 * @throws DuplicateApplicationException when label or code already exists
 * @throws AuthorizationException        when the user lacks write permission
 */
private Application createApplication(String code, String label, String description, URL applicationRegistryUrl, SSOId... members)
        throws PaasUserNotFoundException, DuplicateApplicationException, AuthorizationException {
    log.debug("/******* create application with label[" + label + "] and code [" + code + "]**********/");
    Application application = new Application(label, code);
    application.setDescription(description);
    if (applicationRegistryUrl != null) {
        application.setApplicationRegistryUrl(applicationRegistryUrl);
    }
    // FIX: stream the varargs array directly instead of the redundant
    // Arrays.asList(...).stream() round-trip.
    final Set<SSOId> candidates = Arrays.stream(members).collect(Collectors.toSet());
    assertMembersExist(candidates);
    application.setMembers(candidates);
    assertHasPermissionFor(application);
    // Validate model
    ValidatorUtil.validate(application);
    // if label is not unique throw exception
    if (!isApplicationLabelUnique(application.getLabel())) {
        String message = "Application with label[" + label + "] already exists.";
        log.info(message);
        throw new DuplicateApplicationException(message);
    }
    // if code is not unique throw exception
    if (!isApplicationCodeUnique(application.getCode())) {
        String message = "Application with code[" + code + "] already exists.";
        log.info(message);
        throw new DuplicateApplicationException(message);
    }
    return application;
}
@Override
@Transactional(propagation = Propagation.REQUIRED, isolation = Isolation.DEFAULT, rollbackForClassName = { "BusinessException" })
public String createPrivateApplication(String code, String label, String description, URL applicationRegistryUrl, SSOId... members)
        throws DuplicateApplicationException, PaasUserNotFoundException {
    // Build and validate the new application, flag it as private, persist it
    // and hand back its generated UID.
    final Application created = createApplication(code, label, description, applicationRegistryUrl, members);
    created.setAsPrivate();
    applicationRepository.save(created);
    return created.getUID();
}
/**
 * Verifies that every candidate SSO id corresponds to an existing PaaS user.
 *
 * @throws PaasUserNotFoundException for the first unknown member encountered
 */
private void assertMembersExist(Set<SSOId> candidates) throws PaasUserNotFoundException {
    // assert users with ssoids exist
    for (SSOId candidate : candidates) {
        if (paasUserRepository.findBySsoId(candidate) == null) {
            final String message = "Cannot create application with member list: " + candidates + ". Member " + candidate + " is unknown.";
            log.debug(message);
            throw new PaasUserNotFoundException(message, candidate);
        }
        // FIX: removed a stray empty statement (';') that followed the if block.
    }
}
@Override
@Transactional(propagation = Propagation.REQUIRED, isolation = Isolation.DEFAULT, rollbackForClassName = { "BusinessException" })
public void deleteApplication(String applicationUID) throws ApplicationNotFoundException, AuthorizationException {
// Soft delete: the entity is only marked as removed here; rows are hard
// deleted later by purgeOldRemovedApplications()/purgeApplication().
Application application = applicationRepository.findByUid(applicationUID);
if (application == null) {
String message = "Application with UID[" + applicationUID + "] does not exist.";
log.info(message);
throw new ApplicationNotFoundException(message);
}
assertHasPermissionFor(application);
// TODO we may to need to enforce this rule in Application entity,
// perhaps in markAsRemoved() operation
if (!applicationHasNoActiveApplicationReleases(applicationUID)) {
String message = "Application with UID[" + applicationUID + "] has active releases.";
log.info(message);
throw new IllegalStateException(message);
}
// NOTE(review): no explicit save(); presumably relies on JPA dirty checking
// inside the surrounding transaction — confirm.
application.markAsRemoved();
}
@Override
@Transactional(propagation = Propagation.REQUIRED, isolation = Isolation.DEFAULT, rollbackForClassName = { "BusinessException" })
public boolean canBeDeleted(String applicationUID) throws ApplicationNotFoundException {
    // Deletion is possible only when the application exists, owns no active
    // release, and the current user is allowed to modify it.
    log.debug("/******* find application by UID[" + applicationUID + "] **********/");
    final Application application = applicationRepository.findByUid(applicationUID);
    if (application == null) {
        final String message = "Application with UID[" + applicationUID + "] does not exist.";
        log.info(message);
        throw new ApplicationNotFoundException(message);
    }
    final boolean releaseFree = applicationHasNoActiveApplicationReleases(applicationUID);
    return releaseFree && hasWritePermissionFor(application);
}
@Override
public Application findApplicationByUID(String applicationUID) throws ApplicationNotFoundException {
    // Load the application by UID, failing loudly when it is unknown, and
    // flag whether the current user may edit it.
    log.debug("/******* find application by UID[" + applicationUID + "] **********/");
    final Application found = applicationRepository.findByUid(applicationUID);
    if (found == null) {
        final String message = "Application with UID[" + applicationUID + "] does not exist.";
        log.info(message);
        throw new ApplicationNotFoundException(message);
    }
    found.setEditable(hasWritePermissionFor(found));
    return found;
}
@Override
@Transactional(propagation = Propagation.REQUIRED, isolation = Isolation.DEFAULT, rollbackForClassName = { "BusinessException" })
public Application updateApplication(Application application) throws ApplicationNotFoundException, DuplicateApplicationException,
PaasUserNotFoundException {
// Validation order matters: existence, then members, then permission on the
// PERSISTED entity, then uniqueness checks only for changed fields.
log.debug("/******* update application with label[" + application.getLabel() + "] **********/");
Application persisted = applicationRepository.findByUid(application.getUID());
if (persisted == null) {
String message = "Application with UID[" + application.getUID() + "] does not exist.";
log.info(message);
throw new ApplicationNotFoundException(message);
}
// assert members exist
assertMembersExist(application.listMembers().stream().collect(Collectors.toSet()));
// Permission is checked against the stored state, not the incoming one.
assertHasPermissionFor(persisted);
// if label has changed but new label is not unique -> throw
// exception
if (labelHasChanged(persisted, application) && !isApplicationLabelUnique(application.getLabel())) {
String message = "Application with label[" + application.getLabel() + "] already exists.";
log.info(message);
throw new DuplicateApplicationException(message);
}
// if code has changed but code is not unique throw exception
if (codeHasChanged(persisted, application) && !isApplicationCodeUnique(application.getCode())) {
String message = "Application with code[" + application.getCode() + "] already exists.";
log.info(message);
throw new DuplicateApplicationException(message);
}
return applicationRepository.save(application);
}
/**
 * Fails with {@link AuthorizationException} when the current user may not
 * modify the given application.
 */
private void assertHasPermissionFor(Application application) throws AuthorizationException {
    // FIX: braced the single-statement if per standard Java convention.
    if (!hasWritePermissionFor(application)) {
        throw new AuthorizationException();
    }
}
// Delegates the write-permission decision to SecurityUtils.
private boolean hasWritePermissionFor(Application application) {
return securityUtils.hasWritePermissionFor(application);
}
private boolean labelHasChanged(Application persisted, Application updated) {
    // True when the incoming label differs from the stored one.
    final String incoming = updated.getLabel();
    return !incoming.equals(persisted.getLabel());
}
private boolean codeHasChanged(Application persisted, Application updated) {
    // True when the incoming code differs from the stored one.
    final String incoming = updated.getCode();
    return !incoming.equals(persisted.getCode());
}
@Override
@Transactional(propagation = Propagation.REQUIRED, isolation = Isolation.DEFAULT)
public void purgeOldRemovedApplications() {
    log.info("*** purge old application");
    // Hard-delete every application that was previously soft-removed.
    final List<Application> removed = applicationRepository.findAll(isRemoved());
    applicationRepository.delete(removed);
}
@Override
@Transactional(propagation = Propagation.REQUIRED, isolation = Isolation.DEFAULT)
public void purgeApplication(String uid) throws ApplicationNotFoundException {
// deleteApplication() re-validates existence, permission and absence of
// active releases (it may also throw AuthorizationException /
// IllegalStateException) before we hard-delete the row.
deleteApplication(uid);
// NOTE(review): assumes findByUid still returns the entity after
// markAsRemoved() within the same transaction — confirm.
Application application = applicationRepository.findByUid(uid);
applicationRepository.delete(application);
}
@Override
public long countMyApplications() {
// Number of active applications listing the current user as a member.
return applicationRepository.count(where(isActive()).and(hasForMember(securityUtils.currentUser())));
}
@Override
@Transactional(propagation = Propagation.REQUIRED, isolation = Isolation.DEFAULT)
public String createConfigRole(String applicationUID, String configRoleLabel, List<ConfigOverrideDTO> overrideConfigs) throws ApplicationNotFoundException, InvalidConfigOverrideException {
// Validate every override before touching the application, so a single bad
// entry aborts the whole operation.
for (ConfigOverrideDTO overrideConfig : overrideConfigs) {
try {
ValidatorUtil.validate(overrideConfig);
} catch (TechnicalException e) {
throw new InvalidConfigOverrideException(e.getMessage(), e, overrideConfig);
}
}
Application application = applicationRepository.findByUid(applicationUID);
if (application == null) {
String message = "Application with UID[" + applicationUID + "] does not exist.";
log.info(message);
throw new ApplicationNotFoundException(message);
}
assertHasPermissionFor(application);
// Convert DTOs into persistent ConfigValue entries.
List<ConfigValue> configValues = new ArrayList<>();
for (ConfigOverrideDTO configOverrideDTO : overrideConfigs) {
configValues.add(new ConfigValue(configOverrideDTO.getConfigSet(), configOverrideDTO.getKey(), configOverrideDTO.getValue(),
configOverrideDTO.getComment()));
}
ConfigRole configRole = new ConfigRole(applicationUID);
configRole.setLastModificationComment(configRoleLabel);
configRole.setValues(configValues);
// NOTE(review): no explicit save of configRole; presumably persisted by
// cascade from the application within this transaction — confirm.
application.addConfigRole(configRole);
return configRole.getUID();
}
@Override
@Transactional(propagation = Propagation.REQUIRED, isolation = Isolation.DEFAULT)
public ConfigRole findConfigRole(String configRoleUID) throws ConfigRoleNotFoundException {
    // Load the config role by UID, failing loudly when it does not exist.
    log.debug("/******* find configrole by UID[" + configRoleUID + "] **********/");
    final ConfigRole found = configRoleRepository.findByUid(configRoleUID);
    if (found == null) {
        final String message = "configrole with UID[" + configRoleUID + "] does not exist.";
        log.info(message);
        throw new ConfigRoleNotFoundException(message);
    }
    return found;
}
}
| |
package org.apache.lucene.replicator;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.regex.Matcher;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexCommit;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.IndexNotFoundException;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.SegmentInfos;
import org.apache.lucene.replicator.ReplicationClient.ReplicationHandler;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.InfoStream;
/**
* A {@link ReplicationHandler} for replication of an index. Implements
* {@link #revisionReady} by copying the files pointed by the client resolver to
* the index {@link Directory} and then touches the index with
* {@link IndexWriter} to make sure any unused files are deleted.
* <p>
* <b>NOTE:</b> this handler assumes that {@link IndexWriter} is not opened by
* another process on the index directory. In fact, opening an
* {@link IndexWriter} on the same directory to which files are copied can lead
* to undefined behavior, where some or all the files will be deleted, override
* other files or simply create a mess. When you replicate an index, it is best
* if the index is never modified by {@link IndexWriter}, except the one that is
* open on the source index, from which you replicate.
* <p>
* This handler notifies the application via a provided {@link Callable} when an
* updated index commit was made available for it.
*
* @lucene.experimental
*/
public class IndexReplicationHandler implements ReplicationHandler {

  /**
   * The component used to log messages to the {@link InfoStream#getDefault()
   * default} {@link InfoStream}.
   */
  public static final String INFO_STREAM_COMPONENT = "IndexReplicationHandler";

  private final Directory indexDir;
  private final Callable<Boolean> callback;

  private volatile Map<String,List<RevisionFile>> currentRevisionFiles;
  private volatile String currentVersion;
  private volatile InfoStream infoStream = InfoStream.getDefault();

  /**
   * Returns the last {@link IndexCommit} found in the {@link Directory}, or
   * {@code null} if there are no commits.
   */
  public static IndexCommit getLastCommit(Directory dir) throws IOException {
    try {
      if (DirectoryReader.indexExists(dir)) {
        List<IndexCommit> commits = DirectoryReader.listCommits(dir);
        // listCommits guarantees that we get at least one commit back, or
        // IndexNotFoundException which we handle below
        return commits.get(commits.size() - 1);
      }
    } catch (IndexNotFoundException e) {
      // ignore the exception and return null
    }
    return null;
  }

  /**
   * Verifies that the last file is segments_N and fails otherwise. It also
   * removes and returns the file from the list, because it needs to be handled
   * last, after all files. This is important in order to guarantee that if a
   * reader sees the new segments_N, all other segment files are already on
   * stable storage.
   * <p>
   * The reason why the code fails instead of putting segments_N file last is
   * that this indicates an error in the Revision implementation.
   */
  public static String getSegmentsFile(List<String> files, boolean allowEmpty) {
    if (files.isEmpty()) {
      if (allowEmpty) {
        return null;
      } else {
        throw new IllegalStateException("empty list of files not allowed");
      }
    }
    String segmentsFile = files.remove(files.size() - 1);
    if (!segmentsFile.startsWith(IndexFileNames.SEGMENTS) || segmentsFile.equals(IndexFileNames.SEGMENTS_GEN)) {
      throw new IllegalStateException("last file to copy+sync must be segments_N but got " + segmentsFile
          + "; check your Revision implementation!");
    }
    return segmentsFile;
  }

  /**
   * Cleanup the index directory by deleting all given files. Called when file
   * copy or sync failed.
   */
  public static void cleanupFilesOnFailure(Directory dir, List<String> files) {
    for (String file : files) {
      // suppress any exception because if we're here, it means copy
      // failed, and we must cleanup after ourselves.
      IOUtils.deleteFilesIgnoringExceptions(dir, file);
    }
  }

  /**
   * Cleans up the index directory from old index files. This method uses the
   * last commit found by {@link #getLastCommit(Directory)}. If it matches the
   * expected segmentsFile, then all files not referenced by this commit point
   * are deleted.
   * <p>
   * <b>NOTE:</b> this method does a best effort attempt to clean the index
   * directory. It suppresses any exceptions that occur, as this can be retried
   * the next time.
   */
  public static void cleanupOldIndexFiles(Directory dir, String segmentsFile, InfoStream infoStream) {
    try {
      IndexCommit commit = getLastCommit(dir);
      // commit == null means weird IO errors occurred, ignore them
      // if there were any IO errors reading the expected commit point (i.e.
      // segments files mismatch), then ignore that commit either.
      if (commit != null && commit.getSegmentsFileName().equals(segmentsFile)) {
        Set<String> commitFiles = new HashSet<>();
        commitFiles.addAll(commit.getFileNames());
        commitFiles.add(IndexFileNames.SEGMENTS_GEN);
        Matcher matcher = IndexFileNames.CODEC_FILE_PATTERN.matcher("");
        for (String file : dir.listAll()) {
          if (!commitFiles.contains(file)
              && (matcher.reset(file).matches() || file.startsWith(IndexFileNames.SEGMENTS))) {
            // suppress exceptions, it's just a best effort
            IOUtils.deleteFilesIgnoringExceptions(dir, file);
          }
        }
      }
    } catch (Throwable t) {
      // ignore any errors that happen during this state and only log it. this
      // cleanup will have a chance to succeed the next time we get a new
      // revision.
      if (infoStream.isEnabled(INFO_STREAM_COMPONENT)) {
        infoStream.message(INFO_STREAM_COMPONENT, "cleanupOldIndexFiles(): failed on error " + t.getMessage());
      }
    }
  }

  /**
   * Copies the files from the source directory to the target one, if they are
   * not the same.
   */
  public static void copyFiles(Directory source, Directory target, List<String> files) throws IOException {
    if (!source.equals(target)) {
      for (String file : files) {
        source.copy(target, file, file, IOContext.READONCE);
      }
    }
  }

  /**
   * Writes {@link IndexFileNames#SEGMENTS_GEN} file to the directory, reading
   * the generation from the given {@code segmentsFile}. If it is {@code null},
   * this method deletes segments.gen from the directory.
   */
  public static void writeSegmentsGen(String segmentsFile, Directory dir) {
    if (segmentsFile != null) {
      SegmentInfos.writeSegmentsGen(dir, SegmentInfos.generationFromSegmentsFileName(segmentsFile));
    } else {
      IOUtils.deleteFilesIgnoringExceptions(dir, IndexFileNames.SEGMENTS_GEN);
    }
  }

  /**
   * Constructor with the given index directory and callback to notify when the
   * indexes were updated.
   */
  public IndexReplicationHandler(Directory indexDir, Callable<Boolean> callback) throws IOException {
    this.callback = callback;
    this.indexDir = indexDir;
    currentRevisionFiles = null;
    currentVersion = null;
    if (DirectoryReader.indexExists(indexDir)) {
      final List<IndexCommit> commits = DirectoryReader.listCommits(indexDir);
      final IndexCommit commit = commits.get(commits.size() - 1);
      currentRevisionFiles = IndexRevision.revisionFiles(commit);
      currentVersion = IndexRevision.revisionVersion(commit);
      // FIX: use the handler's infoStream field (initialized to
      // InfoStream.getDefault()) instead of declaring a local variable that
      // shadowed it.
      if (infoStream.isEnabled(INFO_STREAM_COMPONENT)) {
        infoStream.message(INFO_STREAM_COMPONENT, "constructor(): currentVersion=" + currentVersion
            + " currentRevisionFiles=" + currentRevisionFiles);
        infoStream.message(INFO_STREAM_COMPONENT, "constructor(): commit=" + commit);
      }
    }
  }

  @Override
  public String currentVersion() {
    return currentVersion;
  }

  @Override
  public Map<String,List<RevisionFile>> currentRevisionFiles() {
    return currentRevisionFiles;
  }

  @Override
  public void revisionReady(String version, Map<String,List<RevisionFile>> revisionFiles,
      Map<String,List<String>> copiedFiles, Map<String,Directory> sourceDirectory) throws IOException {
    if (revisionFiles.size() > 1) {
      throw new IllegalArgumentException("this handler handles only a single source; got " + revisionFiles.keySet());
    }

    Directory clientDir = sourceDirectory.values().iterator().next();
    List<String> files = copiedFiles.values().iterator().next();
    String segmentsFile = getSegmentsFile(files, false);

    boolean success = false;
    try {
      // copy files from the client to index directory
      copyFiles(clientDir, indexDir, files);

      // fsync all copied files (except segmentsFile)
      indexDir.sync(files);

      // now copy and fsync segmentsFile
      clientDir.copy(indexDir, segmentsFile, segmentsFile, IOContext.READONCE);
      indexDir.sync(Collections.singletonList(segmentsFile));

      success = true;
    } finally {
      if (!success) {
        files.add(segmentsFile); // add it back so it gets deleted too
        cleanupFilesOnFailure(indexDir, files);
      }
    }

    // all files have been successfully copied + sync'd. update the handler's state
    currentRevisionFiles = revisionFiles;
    currentVersion = version;

    if (infoStream.isEnabled(INFO_STREAM_COMPONENT)) {
      infoStream.message(INFO_STREAM_COMPONENT, "revisionReady(): currentVersion=" + currentVersion
          + " currentRevisionFiles=" + currentRevisionFiles);
    }

    // update the segments.gen file
    writeSegmentsGen(segmentsFile, indexDir);

    // Cleanup the index directory from old and unused index files.
    // NOTE: we don't use IndexWriter.deleteUnusedFiles here since it may have
    // side-effects, e.g. if it hits sudden IO errors while opening the index
    // (and can end up deleting the entire index). It is not our job to protect
    // against those errors, app will probably hit them elsewhere.
    cleanupOldIndexFiles(indexDir, segmentsFile, infoStream);

    // successfully updated the index, notify the callback that the index is
    // ready.
    if (callback != null) {
      try {
        callback.call();
      } catch (Exception e) {
        throw new IOException(e);
      }
    }
  }

  /** Sets the {@link InfoStream} to use for logging messages. */
  public void setInfoStream(InfoStream infoStream) {
    if (infoStream == null) {
      infoStream = InfoStream.NO_OUTPUT;
    }
    this.infoStream = infoStream;
  }
}
| |
/*****************************************************************************
* Licensed to the Apache Software Foundation (ASF) under one *
* or more contributor license agreements. See the NOTICE file *
* distributed with this work for additional information *
* regarding copyright ownership. The ASF licenses this file *
* to you under the Apache License, Version 2.0 (the *
* "License"); you may not use this file except in compliance *
* with the License. You may obtain a copy of the License at *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, *
* software distributed under the License is distributed on an *
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY *
* KIND, either express or implied. See the License for the *
* specific language governing permissions and limitations *
* under the License. *
* *
* *
* This file is part of the BeanShell Java Scripting distribution. *
* Documentation and updates may be found at http://www.beanshell.org/ *
* Patrick Niemeyer (pat@pat.net) *
* Author of Learning Java, O'Reilly & Associates *
* *
*****************************************************************************/
package bsh.servlet;
import java.io.*;
import javax.servlet.*;
import javax.servlet.http.*;
import bsh.*;
/**
This file is part of BeanShell - www.beanshell.org
@author Pat Niemeyer
*/
public class BshServlet extends HttpServlet
{
    // Lazily resolved BeanShell version banner; see getBshVersion().
    static String bshVersion;
    static String exampleScript = "print(\"hello!\");";

    /**
     * Returns (and caches) the version string of the BeanShell interpreter
     * available at runtime, or a generic banner when detection fails.
     */
    static String getBshVersion()
    {
        if ( bshVersion != null )
            return bshVersion;

        /*
            We have included a getVersion() command to detect the version
            of bsh. If bsh is packaged in the WAR file it could access it
            directly as a bsh command. But if bsh is in the app server's
            classpath it won't see it here, so we will source it directly.
            This command works around the lack of a coherent version number
            in the early versions.
        */
        Interpreter bsh = new Interpreter();
        try {
            bsh.eval( new InputStreamReader( BshServlet.class.getResource(
                "getVersion.bsh").openStream() ) );
            bshVersion = (String)bsh.eval( "getVersion()" );
        } catch ( Exception e ) {
            bshVersion = "BeanShell: unknown version";
        }

        return bshVersion;
    }

    /**
     * Evaluates the script passed in the {@code bsh.script} parameter and
     * renders the result either as raw text or as an HTML page.
     */
    public void doGet(
        HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException
    {
        String script = request.getParameter("bsh.script");
        String client = request.getParameter("bsh.client");
        String output = request.getParameter("bsh.servlet.output");
        String captureOutErr =
            request.getParameter("bsh.servlet.captureOutErr");
        boolean capture = false;
        if ( captureOutErr != null && captureOutErr.equalsIgnoreCase("true") )
            capture = true;

        Object scriptResult = null;
        Exception scriptError = null;
        StringBuffer scriptOutput = new StringBuffer();
        if ( script != null ) {
            try {
                scriptResult = evalScript(
                    script, scriptOutput, capture, request, response );
            } catch ( Exception e ) {
                scriptError = e;
            }
        }

        response.setHeader( "Bsh-Return", String.valueOf(scriptResult) );

        if ( (output != null && output.equalsIgnoreCase("raw"))
            || ( client != null && client.equals("Remote") ) )
            sendRaw(
                request, response, scriptError, scriptResult, scriptOutput );
        else
            sendHTML( request, response, script, scriptError,
                scriptResult, scriptOutput, capture );
    }

    /** Renders the evaluation result as an HTML page via page.template. */
    void sendHTML(
        HttpServletRequest request, HttpServletResponse response,
        String script, Exception scriptError, Object scriptResult,
        StringBuffer scriptOutput, boolean capture )
        throws IOException
    {
        // Format the output using a simple templating utility
        SimpleTemplate st = new SimpleTemplate(
            BshServlet.class.getResource("page.template") );
        st.replace( "version", getBshVersion() );

        //String requestURI = HttpUtils.getRequestURL( request ).toString()
        // I was told this should work
        String requestURI = request.getRequestURI();

        st.replace( "servletURL", requestURI );
        if ( script != null )
            st.replace( "script", script );
        else
            st.replace( "script", exampleScript );
        if ( capture )
            st.replace( "captureOutErr", "CHECKED" );
        else
            st.replace( "captureOutErr", "" );
        if ( script != null )
            st.replace( "scriptResult",
                formatScriptResultHTML(
                    script, scriptResult, scriptError, scriptOutput ) );

        response.setContentType("text/html");
        PrintWriter out = response.getWriter();
        st.write(out);
        out.flush();
    }

    /** Renders the evaluation result as plain text. */
    void sendRaw(
        HttpServletRequest request, HttpServletResponse response,
        Exception scriptError, Object scriptResult, StringBuffer scriptOutput )
        throws IOException
    {
        response.setContentType("text/plain");
        PrintWriter out = response.getWriter();
        if ( scriptError != null )
            out.println( "Script Error:\n"+scriptError );
        else
            out.println( scriptOutput.toString() );
        out.flush();
    }

    /**
     * Formats the script result, or the error with its source context, as an
     * HTML fragment using the result/error templates.
     */
    String formatScriptResultHTML(
        String script, Object result, Exception error,
        StringBuffer scriptOutput )
        throws IOException
    {
        SimpleTemplate tmplt;

        if ( error != null )
        {
            tmplt = new SimpleTemplate(
                getClass().getResource("error.template") );

            String errString;

            if ( error instanceof bsh.EvalError )
            {
                int lineNo = ((EvalError)error).getErrorLineNumber();
                String msg = error.getMessage();
                int contextLines = 4;
                errString = escape(msg);

                if ( lineNo > -1 )
                    errString += "<hr>"
                        + showScriptContextHTML( script, lineNo, contextLines );
            } else
                errString = escape( error.toString() );

            tmplt.replace("error", errString );
        } else {
            tmplt = new SimpleTemplate(
                getClass().getResource("result.template") );
            tmplt.replace( "value", escape(String.valueOf(result)) );
            tmplt.replace( "output", escape(scriptOutput.toString()) );
        }

        return tmplt.toString();
    }

    /*
        Show context number lines of string before and after target line.
        Add HTML formatting to bold the target line.
    */
    String showScriptContextHTML( String s, int lineNo, int context )
    {
        StringBuffer sb = new StringBuffer();
        BufferedReader br = new BufferedReader( new StringReader(s) );

        int beginLine = Math.max( 1, lineNo-context );
        int endLine = lineNo + context;
        for( int i=1; i<=lineNo+context+1; i++ )
        {
            if ( i < beginLine )
            {
                try {
                    br.readLine();
                } catch ( IOException e ) {
                    throw new RuntimeException( e.toString() );
                }
                continue;
            }
            if ( i > endLine )
                break;

            String line;
            try {
                line = br.readLine();
            } catch ( IOException e ) {
                throw new RuntimeException( e.toString() );
            }

            if ( line == null )
                break;
            if ( i == lineNo )
                sb.append( "<font color=\"red\">"+i+": "+line +"</font><br/>" );
            else
                sb.append( i+": " +line +"<br/>" );
        }

        return sb.toString();
    }

    /** POST behaves exactly like GET. */
    public void doPost(
        HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException
    {
        doGet( request, response );
    }

    /**
     * Evaluates the script in a fresh Interpreter, optionally redirecting
     * System.out/err into the captured output buffer for the duration.
     */
    Object evalScript(
        String script, StringBuffer scriptOutput, boolean captureOutErr,
        HttpServletRequest request, HttpServletResponse response )
        throws EvalError
    {
        // Create a PrintStream to capture output
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        PrintStream pout = new PrintStream( baos );

        // Create an interpreter instance with a null inputstream,
        // the capture out/err stream, non-interactive
        Interpreter bsh = new Interpreter( null, pout, pout, false );

        // set up interpreter
        bsh.set( "bsh.httpServletRequest", request );
        bsh.set( "bsh.httpServletResponse", response );

        // Eval the text, gathering the return value or any error.
        // FIX: removed the unused local 'String error = null;'.
        Object result = null;
        PrintStream sout = System.out;
        PrintStream serr = System.err;
        if ( captureOutErr ) {
            System.setOut( pout );
            System.setErr( pout );
        }
        try {
            // Eval the user text
            result = bsh.eval( script );
        } finally {
            if ( captureOutErr ) {
                System.setOut( sout );
                System.setErr( serr );
            }
        }

        pout.flush();
        scriptOutput.append( baos.toString() );
        return result;
    }

    /**
     * Convert special characters to entities for XML output
     */
    public static String escape(String value)
    {
        // FIX: the replacement strings must be the HTML entities themselves;
        // previously they were the literal characters '&', '<', '>', which
        // made escape() a no-op and left script output/error text unescaped
        // in the generated HTML page (an XSS vector).
        String search = "&<>";
        String[] replace = { "&amp;", "&lt;", "&gt;" };
        StringBuffer buf = new StringBuffer();

        for (int i = 0; i < value.length(); i++)
        {
            char c = value.charAt(i);
            int pos = search.indexOf(c);
            if (pos < 0)
                buf.append(c);
            else
                buf.append(replace[pos]);
        }

        return buf.toString();
    }
}
| |
/*
* Copyright 2012 LinkedIn, Inc
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package voldemort.store.memory;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import org.apache.log4j.Logger;
import voldemort.TestUtils;
import voldemort.common.OpTimeMap;
import voldemort.common.VoldemortOpCode;
import voldemort.store.AbstractStorageEngineTest;
import voldemort.store.StorageEngine;
import voldemort.store.slow.SlowStorageEngine;
import voldemort.utils.ByteArray;
import voldemort.utils.ByteUtils;
import voldemort.utils.pool.KeyedResourcePool;
import voldemort.versioning.ObsoleteVersionException;
import voldemort.versioning.VectorClock;
import voldemort.versioning.Versioned;
public class SlowStorageEngineTest extends AbstractStorageEngineTest {
// NOTE(review): the logger is keyed to KeyedResourcePool.class rather than
// this test class — presumably a copy-paste slip; confirm before relying on
// per-class log routing.
private static final Logger logger = Logger.getLogger(KeyedResourcePool.class.getName());
// Engine under test; built (wrapped in SlowStorageEngine) in setUp().
private StorageEngine<ByteArray, byte[], byte[]> store;
// Operation codes exercised by these tests, populated in the constructor.
private final List<Byte> opList;
public SlowStorageEngineTest() {
    // Register every Voldemort operation type these tests exercise, in the
    // same order the delay assertions expect.
    opList = new ArrayList<Byte>(Arrays.asList(VoldemortOpCode.GET_OP_CODE,
                                               VoldemortOpCode.GET_VERSION_OP_CODE,
                                               VoldemortOpCode.GET_ALL_OP_CODE,
                                               VoldemortOpCode.PUT_OP_CODE,
                                               VoldemortOpCode.DELETE_OP_CODE));
}
@Override
public StorageEngine<ByteArray, byte[], byte[]> getStorageEngine() {
// Returns the SlowStorageEngine wrapper built in setUp().
return store;
}
@Override
public void setUp() throws Exception {
// Wraps an in-memory engine with SlowStorageEngine using asymmetric
// queued/concurrent per-operation delay maps.
super.setUp();
// Do not change the magic constants in the next two constructors! The
// unit tests assert on specific delays occurring.
OpTimeMap queued = new OpTimeMap(10, 20, 30, 40, 50);
OpTimeMap concurrent = new OpTimeMap(50, 40, 30, 20, 10);
this.store = new SlowStorageEngine<ByteArray, byte[], byte[]>(new InMemoryStorageEngine<ByteArray, byte[], byte[]>("test"),
queued,
concurrent);
}
@Override
public List<ByteArray> getKeys(int numKeys) {
    // Produce numKeys random 10-byte keys.
    final List<ByteArray> keys = new ArrayList<ByteArray>(numKeys);
    int remaining = numKeys;
    while (remaining-- > 0) {
        keys.add(new ByteArray(TestUtils.randomBytes(10)));
    }
    return keys;
}
private String getOpName(Byte opCode) {
    // Human-readable name for a Voldemort op code; unknown codes are logged
    // and yield null.
    if(opCode == VoldemortOpCode.GET_OP_CODE) {
        return "Get";
    } else if(opCode == VoldemortOpCode.GET_VERSION_OP_CODE) {
        return "GetVersion";
    } else if(opCode == VoldemortOpCode.GET_ALL_OP_CODE) {
        return "GetAll";
    } else if(opCode == VoldemortOpCode.DELETE_OP_CODE) {
        return "Delete";
    } else if(opCode == VoldemortOpCode.PUT_OP_CODE) {
        return "Put";
    }
    logger.error("getOpName invoked with bad operation code: " + opCode);
    return null;
}
/**
 * Runnable that performs a single store operation, records its wall-clock
 * run time in milliseconds, and counts down the shared latch when done.
 */
public class OpInvoker implements Runnable {

    private final CountDownLatch signal;
    private final byte opCode;
    // FIX: made final — it is assigned once in the constructor only.
    private final ConcurrentLinkedQueue<Long> runTimes;
    private final ByteArray key;
    private final byte[] value;

    OpInvoker(CountDownLatch signal, byte opCode, ConcurrentLinkedQueue<Long> runTimes) {
        this.signal = signal;
        this.opCode = opCode;
        this.runTimes = runTimes;
        this.key = new ByteArray(ByteUtils.getBytes("key", "UTF-8"));
        this.value = ByteUtils.getBytes("value", "UTF-8");
        logger.debug("OpInvoker created for operation " + getOpName(this.opCode) + "(Thread: "
                     + Thread.currentThread().getName() + ")");
    }

    private void doGet() {
        store.get(key, null);
    }

    private void doGetAll() {
        List<ByteArray> keys = new ArrayList<ByteArray>();
        keys.add(key);
        store.getAll(keys, null);
    }

    private void doGetVersion() {
        store.getVersions(key);
    }

    private void doPut() {
        try {
            store.put(key, new Versioned<byte[]>(value), null);
        } catch(ObsoleteVersionException e) {
            // This exception is expected in some tests.
        }
    }

    private void doDelete() {
        store.delete(key, new VectorClock());
    }

    public void run() {
        long startTimeNs = System.nanoTime();
        // FIX: count down the latch in a finally block so that an unexpected
        // exception in an operation cannot leave the awaiting test hanging
        // forever on signal.await().
        try {
            switch(this.opCode) {
                case VoldemortOpCode.GET_OP_CODE:
                    doGet();
                    break;
                case VoldemortOpCode.GET_VERSION_OP_CODE:
                    doGetVersion();
                    break;
                case VoldemortOpCode.GET_ALL_OP_CODE:
                    doGetAll();
                    break;
                case VoldemortOpCode.PUT_OP_CODE:
                    doPut();
                    break;
                case VoldemortOpCode.DELETE_OP_CODE:
                    doDelete();
                    break;
                default:
                    logger.error("OpInvoker issued with bad operation code: " + this.opCode);
            }
            long runTimeMs = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startTimeNs);
            runTimes.add(runTimeMs);
            logger.debug("OpInvoker finished operation " + getOpName(this.opCode) + "(Thread: "
                         + Thread.currentThread().getName() + ")");
        } finally {
            signal.countDown();
        }
    }
}
// true if runtime is not within a "reasonable" range. Reasonable
// defined by a 10% fudge factor.
private boolean isRunTimeBad(long runTimeMs, long expectedTimeMs) {
if((runTimeMs < (expectedTimeMs * 0.9) || runTimeMs > (expectedTimeMs * 1.1))) {
return true;
}
return false;
}
/**
* Test the time of each op type individually.
*/
public void testEachOpTypeIndividually() {
// Magic constant 60 ms is based on operation times defined above.
long expectedMs = 60;
// Magic constants 50 and 10 below allow us to make sure a tight timing
// test passes 80% of the time.
int numOps = 50;
int numOpsWithBadTimesOK = 10;
for(byte op: opList) {
int badTimesCounter = 0;
for(int i = 0; i < numOps; ++i) {
CountDownLatch waitForOp = new CountDownLatch(1);
ConcurrentLinkedQueue<Long> runTimes = new ConcurrentLinkedQueue<Long>();
new Thread(new OpInvoker(waitForOp, op, runTimes)).start();
try {
waitForOp.await();
} catch(InterruptedException e) {
e.printStackTrace();
}
long runTimeMs = runTimes.poll();
assertTrue(runTimes.isEmpty());
if(isRunTimeBad(runTimeMs, expectedMs)) {
System.err.println("Bad run time (some are expected): " + getOpName(op)
+ ", runTimeMs: " + runTimeMs + ", expectedMs: "
+ expectedMs + ")");
badTimesCounter++;
}
}
assertFalse("Too many bad times for operation " + getOpName(op),
badTimesCounter > numOpsWithBadTimesOK);
}
}
/**
* Test repeated operations.
*/
public void testEachOpTypeRepeated() {
// Magic number '2': Run once to warm up, then tests timing asserts.
for(int j = 0; j < 2; j++) {
// Magic constant 1 means we can have one op report a bad (tight)
// timing result
int numOpsWithBadTimesAllowed = 1;
int numOpsWithBadTimes = 0;
for(byte op: opList) {
ConcurrentLinkedQueue<Long> runTimes = new ConcurrentLinkedQueue<Long>();
CountDownLatch waitForOps = new CountDownLatch(5 + 1);
for(int i = 0; i < 5; ++i) {
new Thread(new OpInvoker(waitForOps, op, runTimes)).start();
}
waitForOps.countDown();
try {
waitForOps.await();
} catch(InterruptedException e) {
e.printStackTrace();
}
// Test runs after the single warm up run.
if(j > 0) {
// Determine what the longest delay should be and test the
// maximum delay against that value. The magic constants
// used to construct the SlowStorageEngine determine the
// longest delay.
Long[] allTimes = runTimes.toArray(new Long[0]);
Arrays.sort(allTimes);
long maxTimeMs = allTimes[4];
long expectedTimeMs = 0;
switch(op) {
case VoldemortOpCode.GET_OP_CODE:
expectedTimeMs = (5 * 10) + 50;
break;
case VoldemortOpCode.GET_VERSION_OP_CODE:
expectedTimeMs = (5 * 50) + 10;
break;
case VoldemortOpCode.GET_ALL_OP_CODE:
expectedTimeMs = (5 * 40) + 20;
break;
case VoldemortOpCode.PUT_OP_CODE:
expectedTimeMs = (5 * 20) + 40;
break;
case VoldemortOpCode.DELETE_OP_CODE:
expectedTimeMs = (5 * 30) + 30;
break;
}
if(isRunTimeBad(maxTimeMs, expectedTimeMs)) {
numOpsWithBadTimes++;
String details = getOpName(op) + ", maxTimeMs: " + maxTimeMs + ", "
+ expectedTimeMs;
System.err.println("Bad run time (some are expected): " + details);
}
}
assertFalse("Too many operations with bad run times: " + numOpsWithBadTimes,
numOpsWithBadTimes > numOpsWithBadTimesAllowed);
}
}
}
}
| |
/*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.servicecatalog.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
 * Request to create a new provisioning artifact for a Service Catalog product.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/servicecatalog-2015-12-10/CreateProvisioningArtifact"
 *      target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class CreateProvisioningArtifactRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * Language code for this operation: "en" (English), "jp" (Japanese) or "zh" (Chinese).
     * When no code is specified, "en" is used as the default.
     */
    private String acceptLanguage;

    /** The product identifier. */
    private String productId;

    /** The parameters to use when creating the new provisioning artifact. */
    private ProvisioningArtifactProperties parameters;

    /**
     * A token to disambiguate duplicate requests. Supply a different idempotency token per
     * request when creating multiple resources from the same input.
     */
    private String idempotencyToken;

    /**
     * Sets the language code ("en" default, "jp", or "zh").
     *
     * @param acceptLanguage
     *        The language code to use for this operation.
     */
    public void setAcceptLanguage(String acceptLanguage) {
        this.acceptLanguage = acceptLanguage;
    }

    /**
     * Returns the language code ("en" default, "jp", or "zh").
     *
     * @return The language code to use for this operation.
     */
    public String getAcceptLanguage() {
        return this.acceptLanguage;
    }

    /**
     * Fluent variant of {@link #setAcceptLanguage(String)}.
     *
     * @param acceptLanguage
     *        The language code to use for this operation.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateProvisioningArtifactRequest withAcceptLanguage(String acceptLanguage) {
        setAcceptLanguage(acceptLanguage);
        return this;
    }

    /**
     * Sets the product identifier.
     *
     * @param productId
     *        The product identifier.
     */
    public void setProductId(String productId) {
        this.productId = productId;
    }

    /**
     * Returns the product identifier.
     *
     * @return The product identifier.
     */
    public String getProductId() {
        return this.productId;
    }

    /**
     * Fluent variant of {@link #setProductId(String)}.
     *
     * @param productId
     *        The product identifier.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateProvisioningArtifactRequest withProductId(String productId) {
        setProductId(productId);
        return this;
    }

    /**
     * Sets the parameters to use when creating the new provisioning artifact.
     *
     * @param parameters
     *        The parameters to use when creating the new provisioning artifact.
     */
    public void setParameters(ProvisioningArtifactProperties parameters) {
        this.parameters = parameters;
    }

    /**
     * Returns the parameters to use when creating the new provisioning artifact.
     *
     * @return The parameters to use when creating the new provisioning artifact.
     */
    public ProvisioningArtifactProperties getParameters() {
        return this.parameters;
    }

    /**
     * Fluent variant of {@link #setParameters(ProvisioningArtifactProperties)}.
     *
     * @param parameters
     *        The parameters to use when creating the new provisioning artifact.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateProvisioningArtifactRequest withParameters(ProvisioningArtifactProperties parameters) {
        setParameters(parameters);
        return this;
    }

    /**
     * Sets the idempotency token used to disambiguate duplicate requests.
     *
     * @param idempotencyToken
     *        A token to disambiguate duplicate requests; use a different token for each request.
     */
    public void setIdempotencyToken(String idempotencyToken) {
        this.idempotencyToken = idempotencyToken;
    }

    /**
     * Returns the idempotency token used to disambiguate duplicate requests.
     *
     * @return A token to disambiguate duplicate requests.
     */
    public String getIdempotencyToken() {
        return this.idempotencyToken;
    }

    /**
     * Fluent variant of {@link #setIdempotencyToken(String)}.
     *
     * @param idempotencyToken
     *        A token to disambiguate duplicate requests; use a different token for each request.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateProvisioningArtifactRequest withIdempotencyToken(String idempotencyToken) {
        setIdempotencyToken(idempotencyToken);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Note: this intentionally mirrors the generated-code output exactly,
        // including the trailing comma that appears when idempotencyToken is
        // null but an earlier field is set.
        StringBuilder text = new StringBuilder("{");
        if (getAcceptLanguage() != null)
            text.append("AcceptLanguage: ").append(getAcceptLanguage()).append(",");
        if (getProductId() != null)
            text.append("ProductId: ").append(getProductId()).append(",");
        if (getParameters() != null)
            text.append("Parameters: ").append(getParameters()).append(",");
        if (getIdempotencyToken() != null)
            text.append("IdempotencyToken: ").append(getIdempotencyToken());
        return text.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        // instanceof is false for null, covering the explicit null check too.
        if (!(obj instanceof CreateProvisioningArtifactRequest))
            return false;
        CreateProvisioningArtifactRequest that = (CreateProvisioningArtifactRequest) obj;
        return java.util.Objects.equals(getAcceptLanguage(), that.getAcceptLanguage())
                && java.util.Objects.equals(getProductId(), that.getProductId())
                && java.util.Objects.equals(getParameters(), that.getParameters())
                && java.util.Objects.equals(getIdempotencyToken(), that.getIdempotencyToken());
    }

    @Override
    public int hashCode() {
        // Objects.hash applies the same prime-31 accumulation (seeded with 1,
        // null -> 0) as the hand-rolled generated version, so values match.
        return java.util.Objects.hash(getAcceptLanguage(), getProductId(), getParameters(), getIdempotencyToken());
    }

    @Override
    public CreateProvisioningArtifactRequest clone() {
        return (CreateProvisioningArtifactRequest) super.clone();
    }
}
| |
/**
* Copyright 2011-2021 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.dmdl.windgate.util;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import com.asakusafw.dmdl.java.emitter.EmitContext;
import com.asakusafw.utils.java.model.syntax.Attribute;
import com.asakusafw.utils.java.model.syntax.ClassDeclaration;
import com.asakusafw.utils.java.model.syntax.Expression;
import com.asakusafw.utils.java.model.syntax.MethodDeclaration;
import com.asakusafw.utils.java.model.syntax.ModelFactory;
import com.asakusafw.utils.java.model.syntax.Name;
import com.asakusafw.utils.java.model.syntax.Type;
import com.asakusafw.utils.java.model.syntax.TypeBodyDeclaration;
import com.asakusafw.utils.java.model.syntax.WildcardBoundKind;
import com.asakusafw.utils.java.model.util.AttributeBuilder;
import com.asakusafw.utils.java.model.util.ExpressionBuilder;
import com.asakusafw.utils.java.model.util.JavadocBuilder;
import com.asakusafw.utils.java.model.util.Models;
import com.asakusafw.utils.java.model.util.TypeBuilder;
import com.asakusafw.vocabulary.external.ImporterDescription.DataSize;
import com.asakusafw.windgate.core.vocabulary.DataModelStreamSupport;
/**
* Generates {@code FsImporterDescription} and {@code FsExporterDescription}.
* @since 0.7.3
*/
public final class FsProcessDescriptionGenerator {

    // The importer base type is referenced by name (not by Class literal) to
    // avoid a compile-time dependency on the vocabulary library.
    private static final String IMPORTER_TYPE_NAME =
            "com.asakusafw.vocabulary.windgate.FsImporterDescription"; //$NON-NLS-1$

    // Same rationale as above, for the exporter base type.
    private static final String EXPORTER_TYPE_NAME =
            "com.asakusafw.vocabulary.windgate.FsExporterDescription"; //$NON-NLS-1$

    // Emit context used to resolve types and write out the generated class.
    private final EmitContext context;

    // Meta-description of the class to be generated.
    private final Description description;

    // Java model factory obtained from the emit context; used for all AST nodes.
    private final ModelFactory f;

    // true generates an FsImporterDescription, false an FsExporterDescription.
    private final boolean importer;

    private FsProcessDescriptionGenerator(
            EmitContext context,
            Description description,
            boolean importer) {
        assert context != null;
        assert description != null;
        this.context = context;
        this.f = context.getModelFactory();
        this.importer = importer;
        this.description = description;
    }

    /**
     * Generates the class in the context.
     * @param context the target emit context
     * @param description the meta-description of target class
     * @throws IOException if generation was failed by I/O error
     */
    public static void generateImporter(EmitContext context, Description description) throws IOException {
        FsProcessDescriptionGenerator generator = new FsProcessDescriptionGenerator(context, description, true);
        generator.emit();
    }

    /**
     * Generates the class in the context.
     * @param context the target emit context
     * @param description the meta-description of target class
     * @throws IOException if generation was failed by I/O error
     */
    public static void generateExporter(EmitContext context, Description description) throws IOException {
        FsProcessDescriptionGenerator generator = new FsProcessDescriptionGenerator(context, description, false);
        generator.emit();
    }

    /**
     * Builds the complete class declaration (Javadoc, attributes, superclass
     * and members) and writes it through the emit context.
     * @throws IOException if emitting the declaration fails
     */
    private void emit() throws IOException {
        ClassDeclaration decl = f.newClassDeclaration(
                new JavadocBuilder(f)
                    .inline("{0} for {1}.",
                        d -> d.text(description.getDescription()),
                        d -> d.linkType(context.resolve(description.getModelClassName())))
                    .toJavadoc(),
                getClassAttributes(),
                context.getTypeName(),
                context.resolve(Models.toName(f, importer ? IMPORTER_TYPE_NAME : EXPORTER_TYPE_NAME)),
                Collections.emptyList(),
                createMembers());
        context.emit(decl);
    }

    /**
     * Returns the modifiers of the generated class: always {@code public}, and
     * additionally {@code abstract} when any of the mandatory properties
     * (profile name, path, support class) is missing, so that the user must
     * complete the class by subclassing it.
     */
    private List<? extends Attribute> getClassAttributes() {
        AttributeBuilder builder = new AttributeBuilder(f);
        builder.Public();
        if (description.getProfileName() == null
                || description.getPath() == null
                || description.getSupportClassName() == null) {
            builder.Abstract();
        }
        return builder.toAttributes();
    }

    /**
     * Creates the member declarations of the generated class. Getter methods
     * are only generated for properties that are actually set on the
     * description; {@code getModelType()} is always present.
     */
    private List<TypeBodyDeclaration> createMembers() {
        List<TypeBodyDeclaration> results = new ArrayList<>();
        results.add(createGetModelType());
        if (description.getProfileName() != null) {
            results.add(createGetProfileName());
        }
        if (description.getPath() != null) {
            results.add(createGetPath());
        }
        if (description.getSupportClassName() != null) {
            results.add(createGetStreamSupport());
        }
        if (description.getDataSize() != null) {
            results.add(createGetDataSize());
        }
        return results;
    }

    /**
     * Creates {@code Class<? extends <model>> getModelType()} returning the
     * data model class literal.
     */
    private MethodDeclaration createGetModelType() {
        return createGetter(
                new TypeBuilder(f, context.resolve(Class.class))
                    .parameterize(f.newWildcard(
                            WildcardBoundKind.UPPER_BOUNDED,
                            context.resolve(description.getModelClassName())))
                    .toType(),
                "getModelType", //$NON-NLS-1$
                f.newClassLiteral(context.resolve(description.getModelClassName())));
    }

    /**
     * Creates {@code String getProfileName()} returning the configured profile
     * name as a string literal.
     */
    private MethodDeclaration createGetProfileName() {
        return createGetter(
                context.resolve(String.class),
                "getProfileName", //$NON-NLS-1$
                Models.toLiteral(f, description.getProfileName()));
    }

    /**
     * Creates {@code Class<? extends DataModelStreamSupport<?>> getStreamSupport()}
     * returning the configured support class literal.
     */
    private MethodDeclaration createGetStreamSupport() {
        return createGetter(
                new TypeBuilder(f, context.resolve(Class.class))
                    .parameterize(f.newWildcard(
                            WildcardBoundKind.UPPER_BOUNDED,
                            new TypeBuilder(f, context.resolve(DataModelStreamSupport.class))
                                .parameterize(f.newWildcard())
                                .toType()))
                    .toType(),
                "getStreamSupport", //$NON-NLS-1$
                f.newClassLiteral(context.resolve(description.getSupportClassName())));
    }

    /**
     * Creates {@code String getPath()} returning the configured path as a
     * string literal.
     */
    private MethodDeclaration createGetPath() {
        return createGetter(
                context.resolve(String.class),
                "getPath", //$NON-NLS-1$
                Models.toLiteral(f, description.getPath()));
    }

    /**
     * Creates {@code DataSize getDataSize()} returning the configured enum
     * constant via a field access on the {@code DataSize} type.
     */
    private MethodDeclaration createGetDataSize() {
        Type type = context.resolve(DataSize.class);
        return createGetter(
                type,
                "getDataSize", //$NON-NLS-1$
                new TypeBuilder(f, type)
                    .field(description.getDataSize().name())
                    .toExpression());
    }

    /**
     * Creates a {@code public} {@code @Override} no-argument getter of the
     * given return type whose body is a single {@code return <value>;}.
     * @param type the return type
     * @param name the method name
     * @param value the expression to return
     */
    private MethodDeclaration createGetter(
            com.asakusafw.utils.java.model.syntax.Type type,
            String name,
            Expression value) {
        assert type != null;
        assert name != null;
        assert value != null;
        return f.newMethodDeclaration(
                null,
                new AttributeBuilder(f)
                    .annotation(context.resolve(Override.class))
                    .Public()
                    .toAttributes(),
                type,
                f.newSimpleName(name),
                Collections.emptyList(),
                Arrays.asList(new ExpressionBuilder(f, value).toReturnStatement()));
    }

    /**
     * Represents the meta description.
     * @since 0.7.0
     */
    public static final class Description {

        // Textual description embedded into the generated class Javadoc.
        private final String description;

        // Fully-qualified name of the data model class.
        private final Name modelClassName;

        // Optional properties: when any of path/profileName/supportClassName is
        // left null, the generated class becomes abstract (see getClassAttributes).
        private String path;

        private String profileName;

        private Name supportClassName;

        // Optional import-size hint; only emitted when set.
        private DataSize dataSize;

        /**
         * Creates a new instance.
         * @param description the textual description
         * @param modelClassName the target data model class name
         */
        public Description(String description, Name modelClassName) {
            this.description = description;
            this.modelClassName = modelClassName;
        }

        /**
         * Returns the textual description for the target class.
         * @return the description
         */
        public String getDescription() {
            return description;
        }

        /**
         * Returns the data model class name.
         * @return the data model class name
         */
        public Name getModelClassName() {
            return modelClassName;
        }

        /**
         * Returns the profile name.
         * @return the profile name, or {@code null} if it is not set
         */
        public String getProfileName() {
            return profileName;
        }

        /**
         * Sets the profile name.
         * @param value the value to set
         */
        public void setProfileName(String value) {
            this.profileName = value;
        }

        /**
         * Returns path.
         * @return the path, or {@code null} if it is not set
         */
        public String getPath() {
            return path;
        }

        /**
         * Sets the path.
         * @param value the value to set
         */
        public void setPath(String value) {
            this.path = value;
        }

        /**
         * Returns the format class name.
         * @return the format class name, or {@code null} if it is not set
         */
        public Name getSupportClassName() {
            return supportClassName;
        }

        /**
         * Sets the format class name.
         * @param value the value to set
         */
        public void setSupportClassName(Name value) {
            this.supportClassName = value;
        }

        /**
         * Returns the data size.
         * @return the data size, or {@code null} if it is not set
         */
        public DataSize getDataSize() {
            return dataSize;
        }

        /**
         * Sets the data size.
         * @param value the value to set
         */
        public void setDataSize(DataSize value) {
            this.dataSize = value;
        }
    }
}
| |
package nl.esciencecenter.octopus.webservice.api;
/*
* #%L
* Octopus Job Webservice
* %%
* Copyright (C) 2013 Nederlands eScience Center
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import static com.yammer.dropwizard.testing.JsonHelpers.asJson;
import static com.yammer.dropwizard.testing.JsonHelpers.fromJson;
import static com.yammer.dropwizard.testing.JsonHelpers.jsonFixture;
import static org.fest.assertions.api.Assertions.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.List;
import nl.esciencecenter.octopus.Octopus;
import nl.esciencecenter.octopus.engine.files.AbsolutePathImplementation;
import nl.esciencecenter.octopus.exceptions.OctopusException;
import nl.esciencecenter.octopus.exceptions.OctopusIOException;
import nl.esciencecenter.octopus.files.AbsolutePath;
import nl.esciencecenter.octopus.files.FileSystem;
import nl.esciencecenter.octopus.files.Files;
import nl.esciencecenter.octopus.files.RelativePath;
import nl.esciencecenter.octopus.jobs.JobDescription;
import nl.esciencecenter.octopus.util.Sandbox;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
public class JobSubmitRequestTest {

    /** Fixture request rebuilt before each test by {@link #setUp()}. */
    private JobSubmitRequest request;

    @Before
    public void setUp() {
        request = sampleRequest();
    }

    @Test
    public void testJobSubmitRequest_AllArguments() {
        assertNotNull(request);
    }

    @Test
    public void testGetStatus_callback_url() {
        assertEquals(request.getStatus_callback_url(), URI.create("http://localhost/status"));
    }

    @Test
    public void testJobSubmitRequest() {
        assertNotNull(new JobSubmitRequest());
    }

    /**
     * Builds a fully-populated sample request shared by all tests.
     *
     * Uses {@link URI#create(String)} for the hard-coded callback URL: the
     * original {@code new URI(...)} forced a checked-exception catch that was
     * silently swallowed, which would have left the callback {@code null} had
     * the literal ever been invalid.
     */
    public static JobSubmitRequest sampleRequest() {
        List<String> arguments = new ArrayList<String>();
        arguments.add("runme.sh");
        List<String> prestaged = new ArrayList<String>();
        prestaged.add("runme.sh");
        prestaged.add("input.dat");
        List<String> poststaged = new ArrayList<String>();
        poststaged.add("output.dat");
        URI cb = URI.create("http://localhost/status");
        return new JobSubmitRequest("/tmp/jobdir", "/bin/sh", arguments, prestaged, poststaged, "stderr.txt", "stdout.txt", cb);
    }

    @Test
    public void serializesToJSON() throws IOException {
        assertThat("a JobSubmitRequest can be serialized to JSON", asJson(request),
                   is(equalTo(jsonFixture("fixtures/request.json"))));
    }

    @Test
    public void deserializesFromJSON() throws IOException {
        assertThat("a JobSubmitRequest can be deserialized from JSON",
                   fromJson(jsonFixture("fixtures/request.json"), JobSubmitRequest.class), is(request));
    }

    @Test
    public void deserializedFromJson_WithoutCallback() throws IOException {
        request.status_callback_url = null;
        assertThat("a JobSubmitRequest can be deserialized from JSON",
                   fromJson(jsonFixture("fixtures/request.nocallback.json"), JobSubmitRequest.class), is(request));
    }

    @Test
    public void testEquals() {
        // Null, foreign type, identity, and value equality.
        assertThat(request.equals(null)).isFalse();
        assertThat(request.equals("string")).isFalse();
        assertTrue(request.equals(request));
        assertTrue(request.equals(sampleRequest()));

        // Each field in turn must participate in equals().
        JobSubmitRequest r2 = sampleRequest();
        r2.executable = "/bin/bash";
        assertFalse(request.equals(r2));

        JobSubmitRequest r3 = sampleRequest();
        r3.jobdir = "/tmp/jobdir2";
        assertFalse(request.equals(r3));

        JobSubmitRequest r4 = sampleRequest();
        r4.stderr = "error.log";
        assertFalse(request.equals(r4));

        JobSubmitRequest r5 = sampleRequest();
        r5.stdout = "out.log";
        assertFalse(request.equals(r5));

        JobSubmitRequest r6 = sampleRequest();
        r6.arguments.add("bla");
        assertFalse(request.equals(r6));

        JobSubmitRequest r7 = sampleRequest();
        r7.prestaged.add("somefile");
        assertFalse(request.equals(r7));

        JobSubmitRequest r8 = sampleRequest();
        r8.poststaged.add("somefile");
        assertFalse(request.equals(r8));

        // URI.create removes the checked-exception try/catch + fail() dance.
        JobSubmitRequest r9 = sampleRequest();
        r9.status_callback_url = URI.create("http://example.com");
        assertFalse(request.equals(r9));
    }

    @Test
    public void testToString() {
        String s =
                "JobSubmitRequest{jobdir=/tmp/jobdir, executable=/bin/sh, stderr=stderr.txt, stdout=stdout.txt, arguments=[runme.sh], prestaged=[runme.sh, input.dat], poststaged=[output.dat], status_callback_url=http://localhost/status}";
        assertEquals(s, request.toString());
    }

    @Test
    public void toJobDescription() {
        JobDescription description = request.toJobDescription();
        JobDescription expected_description = new JobDescription();
        expected_description.setArguments("runme.sh");
        expected_description.setExecutable("/bin/sh");
        // FIXME when https://github.com/NLeSC/octopus/issues/53 is resolved then remove ignore
        assertThat(description.toString()).isEqualTo(expected_description.toString());
    }

    @Ignore("Test is ignored because Sandbox is missing equals() method")
    @Test
    public void toSandbox() throws OctopusIOException, OctopusException {
        String sandboxId = "octopus-sandbox-1234567890";
        Octopus octopus = mock(Octopus.class);
        Files filesEngine = mock(Files.class);
        when(octopus.files()).thenReturn(filesEngine);
        FileSystem filesystem = mock(FileSystem.class);
        // Stub path resolution for every pre/post-staged file plus stdout/stderr.
        when(filesEngine.newPath(filesystem, new RelativePath(new String[] { "/tmp/jobdir", "runme.sh" }))).thenReturn(
                new AbsolutePathImplementation(filesystem, new RelativePath("/tmp/jobdir/runme.sh")));
        when(filesEngine.newPath(filesystem, new RelativePath(new String[] { "/tmp/jobdir", "input.dat" }))).thenReturn(
                new AbsolutePathImplementation(filesystem, new RelativePath("/tmp/jobdir/input.dat")));
        when(filesEngine.newPath(filesystem, new RelativePath(new String[] { "/tmp/jobdir", "stderr.txt" }))).thenReturn(
                new AbsolutePathImplementation(filesystem, new RelativePath("/tmp/jobdir/stderr.txt")));
        when(filesEngine.newPath(filesystem, new RelativePath(new String[] { "/tmp/jobdir", "stdout.txt" }))).thenReturn(
                new AbsolutePathImplementation(filesystem, new RelativePath("/tmp/jobdir/stdout.txt")));
        when(filesEngine.newPath(filesystem, new RelativePath(new String[] { "/tmp/jobdir", "output.dat" }))).thenReturn(
                new AbsolutePathImplementation(filesystem, new RelativePath("/tmp/jobdir/output.dat")));
        AbsolutePath sandBoxRoot = new AbsolutePathImplementation(filesystem, new RelativePath("/tmp"));
        Sandbox sandbox = request.toSandbox(octopus, sandBoxRoot, sandboxId);
        Sandbox expected = new Sandbox(octopus, sandBoxRoot, sandboxId);
        expected.addUploadFile(new AbsolutePathImplementation(filesystem, new RelativePath("/tmp/jobdir/runme.sh")), "runme.sh");
        expected.addUploadFile(new AbsolutePathImplementation(filesystem, new RelativePath("/tmp/jobdir/input.dat")), "input.dat");
        expected.addDownloadFile("output.dat", new AbsolutePathImplementation(filesystem, new RelativePath(
                "/tmp/jobdir/output.dat")));
        expected.addDownloadFile("stderr.txt", new AbsolutePathImplementation(filesystem, new RelativePath(
                "/tmp/jobdir/stderr.txt")));
        expected.addDownloadFile("stdout.txt", new AbsolutePathImplementation(filesystem, new RelativePath(
                "/tmp/jobdir/stdout.txt")));
        // FIXME when https://github.com/NLeSC/octopus/issues/53 is resolved then remove ignore
        assertThat(sandbox).isEqualTo(expected);
    }
}
| |
/*
* (c) Copyright Christian P. Fries, Germany. All rights reserved. Contact: email@christian-fries.de.
*
* Created on 09.02.2004
*/
package net.finmath.montecarlo;
import java.io.Serializable;
import java.util.function.DoubleUnaryOperator;
import java.util.function.IntFunction;
import net.finmath.randomnumbers.MersenneTwister;
import net.finmath.stochastic.RandomVariableInterface;
import net.finmath.time.TimeDiscretizationInterface;
/**
* Implementation of a time-discrete n-dimensional sequence of independent increments
* <i>W = (W<sub>1</sub>,...,W<sub>n</sub>)</i> form a given set of inverse
* cumulative distribution functions.
*
* Independent increments is a sequence of independent random variables index
* by the time index associated with the time discretization. At each time step
* the increment is a d-dimensional random variable \( Z(t_{i}) \), where d is <code>numberOfFactors</code>.
* where each component of \( Z_{j}(t_{i}) \) is given by
* \[
* Z_{j}(t_{i}) = ICDF_{i,j}(U_{i,j})
* \]
* for a sequence of independent uniform distributes random variables U_{i,j}.
*
* The inverse cumulative distribution functions \( ICDF_{i,j} \) are given by
* <code>inverseCumulativeDistributionFunctions</code> as the
* map \( i \mapsto ( j \mapsto ICDF_{i,j} ) \) (here i is the time index and j is the factor (component).
*
* Each \( U_{i,j} \) is samples using <code>numberOfPaths</code>.
*
* The class is immutable and thread safe. It uses lazy initialization.
*
* @author Christian Fries
* @version 1.6
*/
public class IndependentIncrements implements IndependentIncrementsInterface, Serializable {

	/**
	 * NOTE(review): this class implements Serializable, but the non-transient field
	 * <code>incrementsLazyInitLock</code> is a plain <code>Object</code>, which is not
	 * serializable, so default serialization of an instance would fail — confirm whether
	 * instances are ever actually serialized.
	 */
	private static final long serialVersionUID = 6270884840989559532L;

	/** The time discretization of the increments; one d-dimensional increment per time step. */
	private final TimeDiscretizationInterface timeDiscretization;

	/** The number of factors d, i.e. the dimension of each increment \( Z(t_{i}) \). */
	private final int numberOfFactors;

	/** The number of Monte-Carlo paths (samples) generated per increment component. */
	private final int numberOfPaths;

	/** The seed of the Mersenne twister random number generator. */
	private final int seed;

	/** Factory used to wrap the generated sample arrays into RandomVariableInterface objects. */
	private final AbstractRandomVariableFactory randomVariableFactory;

	/** Lazily generated increments, indexed [timeIndex][factor]; transient, regenerated on demand. */
	private transient RandomVariableInterface[][] increments;

	/** Lock guarding the lazy initialization of <code>increments</code>. */
	private final Object incrementsLazyInitLock = new Object();

	/** Map: time index i -> (factor index j -> inverse cumulative distribution function ICDF_{i,j}). */
	private final IntFunction<IntFunction<DoubleUnaryOperator>> inverseCumulativeDistributionFunctions;

	/**
	 * Construct the simulation of independent increments.
	 *
	 * Independent increments is a sequence of independent random variables indexed
	 * by the time index associated with the time discretization. At each time step
	 * the increment is a d-dimensional random variable \( Z(t_{i}) \), where d is <code>numberOfFactors</code>,
	 * where each component \( Z_{j}(t_{i}) \) is given by
	 * \[
	 * 	Z_{j}(t_{i}) = ICDF_{i,j}(U_{i,j})
	 * \]
	 * for a sequence of independent, uniformly distributed random variables U_{i,j}.
	 *
	 * The inverse cumulative distribution functions \( ICDF_{i,j} \) are given by
	 * <code>inverseCumulativeDistributionFunctions</code> as the
	 * map \( i \mapsto ( j \mapsto ICDF_{i,j} ) \) (here i is the time index and j is the factor (component)).
	 *
	 * Each \( U_{i,j} \) is sampled using <code>numberOfPaths</code> samples.
	 *
	 * The constructor allows to set the factory to be used for the construction of
	 * random variables. This allows to generate increments represented
	 * by different implementations of the RandomVariableInterface
	 * (e.g. the RandomVariableLowMemory internally using float representations).
	 *
	 * @param timeDiscretization The time discretization used for the increments.
	 * @param numberOfFactors Number of factors.
	 * @param numberOfPaths Number of paths to simulate.
	 * @param seed The seed of the random number generator.
	 * @param inverseCumulativeDistributionFunctions A map from the time indices to a map from the factors to the corresponding inverse cumulative distribution function.
	 * @param randomVariableFactory Factory to be used to create random variables.
	 */
	public IndependentIncrements(
			TimeDiscretizationInterface timeDiscretization,
			int numberOfFactors,
			int numberOfPaths,
			int seed,
			IntFunction<IntFunction<DoubleUnaryOperator>> inverseCumulativeDistributionFunctions,
			AbstractRandomVariableFactory randomVariableFactory) {
		super();
		this.timeDiscretization = timeDiscretization;
		this.numberOfFactors = numberOfFactors;
		this.numberOfPaths = numberOfPaths;
		this.seed = seed;
		this.inverseCumulativeDistributionFunctions = inverseCumulativeDistributionFunctions;
		this.randomVariableFactory = randomVariableFactory;
		this.increments = null; // Lazy initialization
	}

	/**
	 * Construct the simulation of independent increments, using the default
	 * random variable factory.
	 *
	 * Independent increments is a sequence of independent random variables indexed
	 * by the time index associated with the time discretization. At each time step
	 * the increment is a d-dimensional random variable \( Z(t_{i}) \), where d is <code>numberOfFactors</code>,
	 * where each component \( Z_{j}(t_{i}) \) is given by
	 * \[
	 * 	Z_{j}(t_{i}) = ICDF_{i,j}(U_{i,j})
	 * \]
	 * for a sequence of independent, uniformly distributed random variables U_{i,j}.
	 *
	 * The inverse cumulative distribution functions \( ICDF_{i,j} \) are given by
	 * <code>inverseCumulativeDistributionFunctions</code> as the
	 * map \( i \mapsto ( j \mapsto ICDF_{i,j} ) \) (here i is the time index and j is the factor (component)).
	 *
	 * Each \( U_{i,j} \) is sampled using <code>numberOfPaths</code> samples.
	 *
	 * @param timeDiscretization The time discretization used for the increments.
	 * @param numberOfFactors Number of factors.
	 * @param numberOfPaths Number of paths to simulate.
	 * @param seed The seed of the random number generator.
	 * @param inverseCumulativeDistributionFunctions A map from the time indices to a map from the factors to the corresponding inverse cumulative distribution function.
	 */
	public IndependentIncrements(
			TimeDiscretizationInterface timeDiscretization,
			int numberOfFactors,
			int numberOfPaths,
			int seed,
			IntFunction<IntFunction<DoubleUnaryOperator>> inverseCumulativeDistributionFunctions) {
		this(timeDiscretization, numberOfFactors, numberOfPaths, seed, inverseCumulativeDistributionFunctions, new RandomVariableFactory());
	}

	@Override
	public IndependentIncrementsInterface getCloneWithModifiedSeed(int seed) {
		// Copy sharing all parameters except the seed; increments are regenerated lazily.
		return new IndependentIncrements(getTimeDiscretization(), getNumberOfFactors(), getNumberOfPaths(), seed, inverseCumulativeDistributionFunctions, randomVariableFactory);
	}

	@Override
	public IndependentIncrementsInterface getCloneWithModifiedTimeDiscretization(TimeDiscretizationInterface newTimeDiscretization) {
		// Copy sharing all parameters except the time discretization.
		return new IndependentIncrements(newTimeDiscretization, getNumberOfFactors(), getNumberOfPaths(), getSeed(), inverseCumulativeDistributionFunctions, randomVariableFactory);
	}

	@Override
	public RandomVariableInterface getIncrement(int timeIndex, int factor) {
		// Thread safe lazy initialization: the lock establishes happens-before with
		// the thread that generated the increments.
		synchronized(incrementsLazyInitLock) {
			if(increments == null) doGenerateIncrements();
		}

		/*
		 * We return an immutable object which ensures that the receiver does not alter the data.
		 */
		return increments[timeIndex][factor];
	}

	/**
	 * Lazy initialization of the increments array.
	 *
	 * Callers must hold <code>incrementsLazyInitLock</code> (see getIncrement);
	 * this method itself performs no locking.
	 */
	private void doGenerateIncrements() {
		if(increments != null) return; // Nothing to do

		// Create random number sequence generator (deterministic for a given seed)
		MersenneTwister mersenneTwister = new MersenneTwister(seed);

		// Allocate memory, indexed [timeStep][factor][path]
		double[][][] incrementsArray = new double[timeDiscretization.getNumberOfTimeSteps()][numberOfFactors][numberOfPaths];

		// Pre-fetch icdfs (this local array intentionally shadows the field of the same name)
		DoubleUnaryOperator[][] inverseCumulativeDistributionFunctions = new DoubleUnaryOperator[timeDiscretization.getNumberOfTimeSteps()][numberOfFactors];
		for(int timeIndex=0; timeIndex<timeDiscretization.getNumberOfTimeSteps(); timeIndex++) {
			for(int factor=0; factor<numberOfFactors; factor++) {
				inverseCumulativeDistributionFunctions[timeIndex][factor] = this.inverseCumulativeDistributionFunctions.apply(timeIndex).apply(factor);
			}
		}

		/*
		 * Generate independent increments via inverse transform sampling.
		 *
		 * The inner loop goes over time and factors.
		 * MersenneTwister is known to generate "independent" increments in 623 dimensions.
		 * Since we want to generate independent streams (paths), the loop over path is the outer loop.
		 *
		 * Note: the draw order (path, then time, then factor) is part of the contract —
		 * changing it changes the generated numbers for a given seed.
		 */
		for(int path=0; path<numberOfPaths; path++) {
			for(int timeIndex=0; timeIndex<timeDiscretization.getNumberOfTimeSteps(); timeIndex++) {
				// Generate uncorrelated increment: map a uniform draw through the corresponding ICDF
				for(int factor=0; factor<numberOfFactors; factor++) {
					double uniformIncrement = mersenneTwister.nextDouble();
					incrementsArray[timeIndex][factor][path] = inverseCumulativeDistributionFunctions[timeIndex][factor].applyAsDouble(uniformIncrement);
				}
			}
		}

		// Allocate memory for RandomVariable wrapper objects.
		increments = new RandomVariableInterface[timeDiscretization.getNumberOfTimeSteps()][numberOfFactors];

		// Wrap the values in RandomVariable objects; the increment over (t_i, t_{i+1}]
		// carries the interval end time t_{i+1}.
		for(int timeIndex=0; timeIndex<timeDiscretization.getNumberOfTimeSteps(); timeIndex++) {
			double time = timeDiscretization.getTime(timeIndex+1);
			for(int factor=0; factor<numberOfFactors; factor++) {
				increments[timeIndex][factor] =
						randomVariableFactory.createRandomVariable(time, incrementsArray[timeIndex][factor]);
			}
		}
	}

	@Override
	public TimeDiscretizationInterface getTimeDiscretization() {
		return timeDiscretization;
	}

	@Override
	public int getNumberOfFactors() {
		return numberOfFactors;
	}

	@Override
	public int getNumberOfPaths() {
		return numberOfPaths;
	}

	@Override
	public RandomVariableInterface getRandomVariableForConstant(double value) {
		// Deterministic (constant) random variable created via the configured factory.
		return randomVariableFactory.createRandomVariable(value);
	}

	/**
	 * @return Returns the seed.
	 */
	public int getSeed() {
		return seed;
	}

	// NOTE(review): consider adding the @Override annotation (overrides Object.toString).
	public String toString() {
		return super.toString()
				+ "\n" + "timeDiscretization: " + timeDiscretization.toString()
				+ "\n" + "numberOfPaths: " + numberOfPaths
				+ "\n" + "numberOfFactors: " + numberOfFactors
				+ "\n" + "seed: " + seed;
	}

	@Override
	public boolean equals(Object o) {
		if (this == o) return true;
		if (o == null || getClass() != o.getClass()) return false;

		IndependentIncrements that = (IndependentIncrements) o;

		// NOTE(review): equals/hashCode ignore inverseCumulativeDistributionFunctions and
		// randomVariableFactory (function objects rarely have a meaningful equals), so two
		// instances producing different increments may compare equal — confirm this is intended.
		if (numberOfFactors != that.numberOfFactors) return false;
		if (numberOfPaths != that.numberOfPaths) return false;
		if (seed != that.seed) return false;
		if (!timeDiscretization.equals(that.timeDiscretization)) return false;

		return true;
	}

	@Override
	public int hashCode() {
		// Combines exactly the four fields used by equals, keeping the contract consistent.
		int result = timeDiscretization.hashCode();
		result = 31 * result + numberOfFactors;
		result = 31 * result + numberOfPaths;
		result = 31 * result + seed;
		return result;
	}
}
| |
/*
* Copyright 2016-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.haskell;
import com.facebook.buck.cxx.CxxHeadersDir;
import com.facebook.buck.cxx.CxxPlatform;
import com.facebook.buck.cxx.CxxPreprocessables;
import com.facebook.buck.cxx.CxxPreprocessorDep;
import com.facebook.buck.cxx.CxxPreprocessorInput;
import com.facebook.buck.cxx.Linker;
import com.facebook.buck.cxx.NativeLinkable;
import com.facebook.buck.cxx.NativeLinkableInput;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.parser.NoSuchBuildTargetException;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildRuleParams;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.CellPathResolver;
import com.facebook.buck.rules.CommonDescriptionArg;
import com.facebook.buck.rules.Description;
import com.facebook.buck.rules.HasDeclaredDeps;
import com.facebook.buck.rules.SourcePath;
import com.facebook.buck.rules.TargetGraph;
import com.facebook.buck.rules.args.Arg;
import com.facebook.buck.rules.args.SourcePathArg;
import com.facebook.buck.rules.args.StringArg;
import com.facebook.buck.util.RichStream;
import com.facebook.buck.util.immutables.BuckStyleImmutable;
import com.facebook.buck.versions.VersionPropagator;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableCollection;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import org.immutables.value.Value;
/**
 * Rule description for prebuilt Haskell libraries. Builds a {@link PrebuiltHaskellLibrary}
 * whose Haskell compile input, native-linkable input and C/C++ preprocessor input are
 * assembled directly from the declared arguments (package db, import dirs,
 * static/shared/profiled libraries, exported flags and header dirs).
 */
public class HaskellPrebuiltLibraryDescription
    implements Description<HaskellPrebuiltLibraryDescriptionArg>,
        VersionPropagator<HaskellPrebuiltLibraryDescriptionArg> {

  @Override
  public Class<HaskellPrebuiltLibraryDescriptionArg> getConstructorArgType() {
    return HaskellPrebuiltLibraryDescriptionArg.class;
  }

  @Override
  public BuildRule createBuildRule(
      TargetGraph targetGraph,
      BuildTarget buildTarget,
      ProjectFilesystem projectFilesystem,
      BuildRuleParams params,
      BuildRuleResolver resolver,
      CellPathResolver cellRoots,
      final HaskellPrebuiltLibraryDescriptionArg args)
      throws NoSuchBuildTargetException {
    return new PrebuiltHaskellLibrary(buildTarget, projectFilesystem, params) {

      // Memoizes the transitive C/C++ preprocessor input per platform.
      private final LoadingCache<CxxPlatform, ImmutableMap<BuildTarget, CxxPreprocessorInput>>
          transitiveCxxPreprocessorInputCache =
              CxxPreprocessables.getTransitiveCxxPreprocessorInputCache(this);

      // Compile deps are the declared deps which are themselves Haskell compile deps.
      @Override
      public Iterable<BuildRule> getCompileDeps(CxxPlatform cxxPlatform) {
        return RichStream.from(args.getDeps())
            .map(resolver::getRule)
            .filter(HaskellCompileDep.class::isInstance)
            .toImmutableList();
      }

      @Override
      public HaskellCompileInput getCompileInput(
          CxxPlatform cxxPlatform, Linker.LinkableDepType depType, boolean hsProfile)
          throws NoSuchBuildTargetException {
        // Pick the library flavor matching the requested link style / profiling mode.
        ImmutableCollection<SourcePath> libs = null;
        if (Linker.LinkableDepType.SHARED == depType) {
          libs = args.getSharedLibs().values();
        } else {
          if (hsProfile) {
            libs = args.getProfiledStaticLibs();
          } else {
            libs = args.getStaticLibs();
          }
        }
        // Expose the prebuilt artifacts as a single Haskell package.
        return HaskellCompileInput.builder()
            .addAllFlags(args.getExportedCompilerFlags())
            .addPackages(
                HaskellPackage.builder()
                    .setInfo(
                        HaskellPackageInfo.of(
                            getBuildTarget().getShortName(), args.getVersion(), args.getId()))
                    .setPackageDb(args.getDb())
                    .addAllInterfaces(args.getImportDirs())
                    .addAllLibraries(libs)
                    .build())
            .build();
      }

      // No private native-link deps; declared deps are exported instead (see below).
      @Override
      public Iterable<? extends NativeLinkable> getNativeLinkableDeps() {
        return ImmutableList.of();
      }

      @Override
      public Iterable<? extends NativeLinkable> getNativeLinkableExportedDeps() {
        return FluentIterable.from(getDeclaredDeps()).filter(NativeLinkable.class);
      }

      @Override
      public NativeLinkableInput getNativeLinkableInput(
          CxxPlatform cxxPlatform,
          Linker.LinkableDepType type,
          boolean forceLinkWhole,
          ImmutableSet<NativeLinkable.LanguageExtensions> languageExtensions)
          throws NoSuchBuildTargetException {
        NativeLinkableInput.Builder builder = NativeLinkableInput.builder();
        builder.addAllArgs(StringArg.from(args.getExportedLinkerFlags()));
        if (type == Linker.LinkableDepType.SHARED) {
          builder.addAllArgs(SourcePathArg.from(args.getSharedLibs().values()));
        } else {
          // Static link: optionally wrap each archive in the platform linker's
          // link-whole flags so all symbols are kept.
          Linker linker = cxxPlatform.getLd().resolve(resolver);
          ImmutableList<Arg> libArgs = SourcePathArg.from(args.getStaticLibs());
          if (forceLinkWhole) {
            libArgs =
                RichStream.from(libArgs)
                    .flatMap(lib -> RichStream.from(linker.linkWhole(lib)))
                    .toImmutableList();
          }
          builder.addAllArgs(libArgs);
        }
        return builder.build();
      }

      @Override
      public Linkage getPreferredLinkage(CxxPlatform cxxPlatform) {
        return Linkage.ANY;
      }

      @Override
      public ImmutableMap<String, SourcePath> getSharedLibraries(CxxPlatform cxxPlatform) {
        return args.getSharedLibs();
      }

      @Override
      public Iterable<CxxPreprocessorDep> getCxxPreprocessorDeps(CxxPlatform cxxPlatform) {
        return FluentIterable.from(getBuildDeps()).filter(CxxPreprocessorDep.class);
      }

      // Exposes the declared header dirs as SYSTEM include paths.
      @Override
      public CxxPreprocessorInput getCxxPreprocessorInput(CxxPlatform cxxPlatform)
          throws NoSuchBuildTargetException {
        CxxPreprocessorInput.Builder builder = CxxPreprocessorInput.builder();
        for (SourcePath headerDir : args.getCxxHeaderDirs()) {
          builder.addIncludes(CxxHeadersDir.of(CxxPreprocessables.IncludeType.SYSTEM, headerDir));
        }
        return builder.build();
      }

      @Override
      public ImmutableMap<BuildTarget, CxxPreprocessorInput> getTransitiveCxxPreprocessorInput(
          CxxPlatform cxxPlatform) throws NoSuchBuildTargetException {
        return transitiveCxxPreprocessorInputCache.getUnchecked(cxxPlatform);
      }
    };
  }

  /** Constructor arguments accepted by prebuilt Haskell library rules. */
  @BuckStyleImmutable
  @Value.Immutable
  interface AbstractHaskellPrebuiltLibraryDescriptionArg
      extends CommonDescriptionArg, HasDeclaredDeps {

    /** Package version, also used to derive the default package id. */
    String getVersion();

    /** Package id; defaults to "name-version". */
    @Value.Default
    default String getId() {
      return String.format("%s-%s", getName(), getVersion());
    }

    /** Path to the package database for this package. */
    SourcePath getDb();

    /** Directories containing compiled interface files. */
    ImmutableList<SourcePath> getImportDirs();

    ImmutableList<SourcePath> getStaticLibs();

    ImmutableList<SourcePath> getProfiledStaticLibs();

    /** Shared libraries, keyed by soname. NOTE(review): key semantics inferred — confirm. */
    ImmutableMap<String, SourcePath> getSharedLibs();

    ImmutableList<String> getExportedLinkerFlags();

    ImmutableList<String> getExportedCompilerFlags();

    @Value.NaturalOrder
    ImmutableSortedSet<SourcePath> getCxxHeaderDirs();
  }
}
| |
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.kudu.client;
import com.google.common.collect.ImmutableList;
import org.apache.kudu.ColumnSchema;
import org.apache.kudu.Schema;
import org.apache.kudu.Type;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
import java.util.List;
import static org.apache.kudu.util.ClientTestUtil.countScanTokenRows;
import static org.apache.kudu.util.ClientTestUtil.createDefaultTable;
import static org.apache.kudu.util.ClientTestUtil.createManyStringsSchema;
import static org.apache.kudu.util.ClientTestUtil.loadDefaultTable;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
public class TestScanToken extends BaseKuduTest {

  private static final String testTableName = "TestScanToken";

  /**
   * Tests scan tokens by creating a set of scan tokens, serializing them, and
   * then executing them in parallel with separate client instances. This
   * simulates the normal usecase of scan tokens being created at a central
   * planner and distributed to remote task executors.
   *
   * NOTE(review): the current body only builds, executes one, and stringifies the
   * tokens — confirm the javadoc above still matches the test's actual scope.
   */
  @Test
  public void testScanTokens() throws Exception {
    // Save the static fetch-batch size so it can be restored for other tests.
    int saveFetchTablets = AsyncKuduClient.FETCH_TABLETS_PER_RANGE_LOOKUP;
    try {
      // For this test, make sure that we cover the case that not all tablets
      // are returned in a single batch.
      AsyncKuduClient.FETCH_TABLETS_PER_RANGE_LOOKUP = 4;

      Schema schema = createManyStringsSchema();
      CreateTableOptions createOptions = new CreateTableOptions();
      createOptions.addHashPartitions(ImmutableList.of("key"), 8);

      PartialRow splitRow = schema.newPartialRow();
      splitRow.addString("key", "key_50");
      createOptions.addSplitRow(splitRow);

      syncClient.createTable(testTableName, schema, createOptions);

      KuduSession session = syncClient.newSession();
      session.setFlushMode(SessionConfiguration.FlushMode.AUTO_FLUSH_BACKGROUND);
      KuduTable table = syncClient.openTable(testTableName);
      // Load 100 rows with zero-padded keys so they split evenly around "key_50".
      for (int i = 0; i < 100; i++) {
        Insert insert = table.newInsert();
        PartialRow row = insert.getRow();
        row.addString("key", String.format("key_%02d", i));
        row.addString("c1", "c1_" + i);
        row.addString("c2", "c2_" + i);
        session.apply(insert);
      }
      session.flush();

      KuduScanToken.KuduScanTokenBuilder tokenBuilder = syncClient.newScanTokenBuilder(table);
      tokenBuilder.batchSizeBytes(0);
      tokenBuilder.setProjectedColumnIndexes(ImmutableList.<Integer>of());
      List<KuduScanToken> tokens = tokenBuilder.build();
      // 8 hash buckets x 2 ranges (one split row) = 16 tablets, one token each.
      assertEquals(16, tokens.size());

      // KUDU-1809, with batchSizeBytes configured to '0',
      // the first call to the tablet server won't return
      // any data.
      {
        KuduScanner scanner = tokens.get(0).intoScanner(syncClient);
        assertEquals(0, scanner.nextRows().getNumRows());
      }

      for (KuduScanToken token : tokens) {
        // Sanity check to make sure the debug printing does not throw.
        LOG.debug(KuduScanToken.stringifySerializedToken(token.serialize(), syncClient));
      }
    } finally {
      AsyncKuduClient.FETCH_TABLETS_PER_RANGE_LOOKUP = saveFetchTablets;
    }
  }

  /**
   * Tests scan token creation and execution on a table with non-covering range partitions.
   */
  @Test
  public void testScanTokensNonCoveringRangePartitions() throws Exception {
    Schema schema = createManyStringsSchema();
    CreateTableOptions createOptions = new CreateTableOptions();
    createOptions.addHashPartitions(ImmutableList.of("key"), 2);

    // Ranges ["a","f) and ["h","z") with a split at "k": the key space (f..h) is uncovered.
    PartialRow lower = schema.newPartialRow();
    PartialRow upper = schema.newPartialRow();
    lower.addString("key", "a");
    upper.addString("key", "f");
    createOptions.addRangePartition(lower, upper);

    lower = schema.newPartialRow();
    upper = schema.newPartialRow();
    lower.addString("key", "h");
    upper.addString("key", "z");
    createOptions.addRangePartition(lower, upper);

    PartialRow split = schema.newPartialRow();
    split.addString("key", "k");
    createOptions.addSplitRow(split);

    syncClient.createTable(testTableName, schema, createOptions);

    KuduSession session = syncClient.newSession();
    session.setFlushMode(SessionConfiguration.FlushMode.AUTO_FLUSH_BACKGROUND);
    KuduTable table = syncClient.openTable(testTableName);
    // Insert one row per single-character key in each covered range.
    for (char c = 'a'; c < 'f'; c++) {
      Insert insert = table.newInsert();
      PartialRow row = insert.getRow();
      row.addString("key", "" + c);
      row.addString("c1", "c1_" + c);
      row.addString("c2", "c2_" + c);
      session.apply(insert);
    }
    for (char c = 'h'; c < 'z'; c++) {
      Insert insert = table.newInsert();
      PartialRow row = insert.getRow();
      row.addString("key", "" + c);
      row.addString("c1", "c1_" + c);
      row.addString("c2", "c2_" + c);
      session.apply(insert);
    }
    session.flush();

    KuduScanToken.KuduScanTokenBuilder tokenBuilder = syncClient.newScanTokenBuilder(table);
    tokenBuilder.setProjectedColumnIndexes(ImmutableList.<Integer>of());
    List<KuduScanToken> tokens = tokenBuilder.build();
    // 2 hash buckets x 3 ranges ([a,f), [h,k), [k,z)) = 6 tablets, one token each.
    assertEquals(6, tokens.size());
    // Every inserted row must be reachable through the tokens exactly once.
    assertEquals('f' - 'a' + 'z' - 'h',
                 countScanTokenRows(tokens,
                     syncClient.getMasterAddressesAsString(),
                     syncClient.getDefaultOperationTimeoutMs()));

    for (KuduScanToken token : tokens) {
      // Sanity check to make sure the debug printing does not throw.
      LOG.debug(KuduScanToken.stringifySerializedToken(token.serialize(), syncClient));
    }
  }

  /**
   * Tests the results of creating scan tokens, altering the columns being
   * scanned, and then executing the scan tokens.
   */
  @Test
  public void testScanTokensConcurrentAlterTable() throws Exception {
    Schema schema = new Schema(ImmutableList.of(
        new ColumnSchema.ColumnSchemaBuilder("key", Type.INT64).nullable(false).key(true).build(),
        new ColumnSchema.ColumnSchemaBuilder("a", Type.INT64).nullable(false).key(false).build()
    ));
    CreateTableOptions createOptions = new CreateTableOptions();
    createOptions.setRangePartitionColumns(ImmutableList.<String>of());
    createOptions.setNumReplicas(1);
    syncClient.createTable(testTableName, schema, createOptions);

    // A single unpartitioned tablet yields exactly one token.
    KuduTable table = syncClient.openTable(testTableName);
    KuduScanToken.KuduScanTokenBuilder tokenBuilder = syncClient.newScanTokenBuilder(table);
    List<KuduScanToken> tokens = tokenBuilder.build();
    assertEquals(1, tokens.size());
    KuduScanToken token = tokens.get(0);

    // Drop a column: hydrating the stale token must fail with "Unknown column".
    syncClient.alterTable(testTableName, new AlterTableOptions().dropColumn("a"));
    try {
      token.intoScanner(syncClient);
      fail();
    } catch (IllegalArgumentException e) {
      assertTrue(e.getMessage().contains("Unknown column"));
    }

    // Add back the column with the wrong type.
    syncClient.alterTable(
        testTableName,
        new AlterTableOptions().addColumn(
            new ColumnSchema.ColumnSchemaBuilder("a", Type.STRING).nullable(true).build()));
    try {
      token.intoScanner(syncClient);
      fail();
    } catch (IllegalStateException e) {
      assertTrue(e.getMessage().contains(
          "invalid type INT64 for column 'a' in scan token, expected: STRING"));
    }

    // Add the column with the wrong nullability.
    syncClient.alterTable(
        testTableName,
        new AlterTableOptions().dropColumn("a")
                               .addColumn(new ColumnSchema.ColumnSchemaBuilder("a", Type.INT64)
                                                          .nullable(true).build()));
    try {
      token.intoScanner(syncClient);
      fail();
    } catch (IllegalStateException e) {
      assertTrue(e.getMessage().contains(
          "invalid nullability for column 'a' in scan token, expected: NOT NULL"));
    }

    // Add the column with the correct type and nullability: hydration now succeeds.
    syncClient.alterTable(
        testTableName,
        new AlterTableOptions().dropColumn("a")
                               .addColumn(new ColumnSchema.ColumnSchemaBuilder("a", Type.INT64)
                                                          .nullable(false)
                                                          .defaultValue(0L).build()));
    token.intoScanner(syncClient);
  }

  /** Test that scanRequestTimeout makes it from the scan token to the underlying Scanner class. */
  @Test
  public void testScanRequestTimeout() throws IOException {
    final int NUM_ROWS_DESIRED = 100;
    final int SCAN_REQUEST_TIMEOUT_MS = 20;
    KuduTable table = createDefaultTable(syncClient, testTableName);
    loadDefaultTable(syncClient, testTableName, NUM_ROWS_DESIRED);
    KuduScanToken.KuduScanTokenBuilder builder =
        new KuduScanToken.KuduScanTokenBuilder(client, table);
    builder.scanRequestTimeout(SCAN_REQUEST_TIMEOUT_MS);
    List<KuduScanToken> tokens = builder.build();
    for (KuduScanToken token : tokens) {
      // The timeout must survive a serialize/deserialize round trip.
      byte[] serialized = token.serialize();
      KuduScanner scanner = KuduScanToken.deserializeIntoScanner(serialized, syncClient);
      assertEquals(SCAN_REQUEST_TIMEOUT_MS, scanner.getScanRequestTimeout());
    }
  }
}
| |
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.wiki.providers;
import net.sf.ehcache.Cache;
import net.sf.ehcache.CacheManager;
import net.sf.ehcache.Element;
import org.apache.log4j.Logger;
import org.apache.wiki.PageManager;
import org.apache.wiki.WikiContext;
import org.apache.wiki.WikiEngine;
import org.apache.wiki.WikiPage;
import org.apache.wiki.api.exceptions.NoRequiredPropertyException;
import org.apache.wiki.api.exceptions.ProviderException;
import org.apache.wiki.parser.MarkupParser;
import org.apache.wiki.render.RenderingManager;
import org.apache.wiki.search.QueryItem;
import org.apache.wiki.util.ClassUtil;
import org.apache.wiki.util.TextUtil;
import java.io.IOException;
import java.util.Collection;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Properties;
import java.util.TreeSet;
/**
* Provides a caching page provider. This class rests on top of a
* real provider class and provides a cache to speed things up. Only
* if the cache copy of the page text has expired, we fetch it from
* the provider.
* <p>
* This class does not detect if someone has modified the page
* externally, not through JSPWiki routines.
* <p>
* Heavily based on ideas by Chris Brooking.
* <p>
* Since 2.10 uses the Ehcache library.
*
* @since 1.6.4
*/
// FIXME: Synchronization is a bit inconsistent in places.
// FIXME: A part of the stuff is now redundant, since we could easily use the text cache
// for a lot of things. RefactorMe.
public class CachingProvider implements WikiPageProvider {
private static final Logger log = Logger.getLogger( CachingProvider.class );

private CacheManager m_cacheManager = CacheManager.getInstance();

/** The wrapped (real) page provider which performs the actual storage. */
private WikiPageProvider m_provider;
// FIXME: Find another way to the search engine to use instead of from WikiEngine?
private WikiEngine m_engine;

/** Cache of WikiPage objects (page metadata), keyed by page name. */
private Cache m_cache;
/** Name of the regular page cache. */
public static final String CACHE_NAME = "jspwiki.pageCache";

/** Cache of page text for the latest version, keyed by page name. */
private Cache m_textCache;
/** Name of the page text cache. */
public static final String TEXTCACHE_NAME = "jspwiki.pageTextCache";

/** Cache of page version history, keyed by page name. */
private Cache m_historyCache;
/** Name of the page history cache. */
public static final String HISTORYCACHE_NAME = "jspwiki.pageHistoryCache";

// Simple hit/miss statistics for the text and history caches.
private long m_cacheMisses = 0;
private long m_cacheHits = 0;
private long m_historyCacheMisses = 0;
private long m_historyCacheHits = 0;

// FIXME: This MUST be cached somehow.
// True once the complete page list is held in memory; after that, any page missing
// from the cache is known not to exist (see pageExists(String)).
private boolean m_gotall = false;

// The default settings of the caches, if you want something else, provide an "ehcache.xml" file
// Please note that JSPWiki ships with a default "ehcache.xml" in the classpath
public static final int DEFAULT_CACHECAPACITY = 1000; // Good for most wikis
public static final int DEFAULT_CACHETIMETOLIVESECONDS = 24*3600;
public static final int DEFAULT_CACHETIMETOIDLESECONDS = 24*3600;
/**
 * {@inheritDoc}
 *
 * Sets up the page, text and history caches (from ehcache.xml if configured there,
 * otherwise with the class defaults) and instantiates the real page provider named
 * by the {@link PageManager#PROP_PAGEPROVIDER} property.
 *
 * @throws NoRequiredPropertyException if the page provider property is missing.
 * @throws IOException if the real provider fails to initialize with an I/O error.
 */
public void initialize( WikiEngine engine, Properties properties )
    throws NoRequiredPropertyException, IOException
{
    log.debug("Initing CachingProvider");

    // engine is used for getting the search engine
    m_engine = engine;

    // The three caches share the same lookup-or-create logic; only the names differ.
    m_cache = getOrCreateCache( engine.getApplicationName() + "." + CACHE_NAME );
    m_textCache = getOrCreateCache( engine.getApplicationName() + "." + TEXTCACHE_NAME );
    m_historyCache = getOrCreateCache( engine.getApplicationName() + "." + HISTORYCACHE_NAME );

    //
    // m_cache.getCacheEventNotificationService().registerListener(new CacheItemCollector());
    //

    //
    //  Find and initialize real provider.
    //
    String classname = TextUtil.getRequiredProperty( properties, PageManager.PROP_PAGEPROVIDER );

    try
    {
        Class< ? > providerclass = ClassUtil.findClass( "org.apache.wiki.providers", classname);
        m_provider = (WikiPageProvider)providerclass.newInstance();

        log.debug("Initializing real provider class "+m_provider);
        m_provider.initialize( engine, properties );
    }
    catch( ClassNotFoundException e )
    {
        log.error("Unable to locate provider class "+classname,e);
        throw new IllegalArgumentException("no provider class", e);
    }
    catch( InstantiationException e )
    {
        log.error("Unable to create provider class "+classname,e);
        throw new IllegalArgumentException("faulty provider class", e);
    }
    catch( IllegalAccessException e )
    {
        log.error("Illegal access to provider class "+classname,e);
        throw new IllegalArgumentException("illegal provider class", e);
    }
}

/**
 * Returns the cache registered under the given name, creating it with the
 * class default capacity/TTL/idle settings if ehcache.xml does not define it.
 *
 * @param cacheName fully qualified cache name (application name + suffix).
 * @return the existing or newly registered cache.
 */
private Cache getOrCreateCache( String cacheName )
{
    if( m_cacheManager.cacheExists( cacheName ) )
    {
        return m_cacheManager.getCache( cacheName );
    }

    log.info("cache with name " + cacheName + " not found in ehcache.xml, creating it with defaults.");
    Cache cache = new Cache( cacheName, DEFAULT_CACHECAPACITY, false, false, DEFAULT_CACHETIMETOLIVESECONDS, DEFAULT_CACHETIMETOIDLESECONDS );
    m_cacheManager.addCache( cache );
    return cache;
}
/**
 * Returns the WikiPage metadata for the given page from the page cache,
 * fetching and caching it from the real provider on a miss.
 *
 * @param name page name; null is tolerated and yields null.
 * @return the cached or freshly fetched page info, or null if the page does not exist.
 * @throws ProviderException if the real provider fails.
 */
private WikiPage getPageInfoFromCache(String name) throws ProviderException {
    // Sanity check; seems to occur sometimes
    if (name == null) {
        return null;
    }

    final Element cached = m_cache.get(name);
    if (cached != null) {
        return (WikiPage) cached.getObjectValue();
    }

    // Cache miss: consult the real provider and remember the result on success.
    final WikiPage refreshed = m_provider.getPageInfo(name, WikiPageProvider.LATEST_VERSION);
    if (refreshed == null) {
        // Page does not exist anywhere; nothing to cache.
        return null;
    }

    m_cache.put(new Element(name, refreshed));
    return refreshed;
}
/**
 * {@inheritDoc}
 *
 * Checks the page cache first; an exact (or latest) version match answers from
 * the cache, anything else is delegated to the real provider.
 */
public boolean pageExists( String pageName, int version )
{
    if( pageName == null ) return false;

    WikiPage p = null;

    try
    {
        p = getPageInfoFromCache( pageName );
    }
    catch( ProviderException e )
    {
        log.info("Provider failed while trying to check if page exists: "+pageName);
        return false;
    }

    if( p != null )
    {
        int latestVersion = p.getVersion();

        if( version == latestVersion || version == LATEST_VERSION )
        {
            return true;
        }

        return m_provider.pageExists( pageName, version );
    }

    try
    {
        return getPageInfo( pageName, version ) != null;
    }
    catch( ProviderException e )
    {
        // Previously swallowed silently; log it so provider failures are visible,
        // consistent with the catch above. We still report "does not exist".
        log.info("Provider failed while trying to check if page version exists: "+pageName, e);
    }

    return false;
}
/**
 * {@inheritDoc}
 *
 * Answers from the page cache when possible; only falls through to the real
 * provider when the cache cannot decide.
 */
public boolean pageExists( String pageName )
{
    if( pageName == null )
    {
        return false;
    }

    WikiPage cached;

    try
    {
        cached = getPageInfoFromCache( pageName );
    }
    catch( ProviderException e )
    {
        log.info("Provider failed while trying to check if page exists: "+pageName);
        return false;
    }

    //
    //  A non-null item means the page exists (either it was already cached,
    //  or it was just fetched from the real provider).
    //
    if( cached != null )
    {
        return true;
    }

    //
    //  If we have a list of all pages in memory, then any page
    //  not in the cache must be non-existent.
    //
    if( m_gotall )
    {
        return false;
    }

    //
    //  We could add the page to the cache here as well,
    //  but in order to understand whether that is a
    //  good thing or not we would need to analyze
    //  the JSPWiki calling patterns extensively.  Presumably
    //  it would be a good thing if pageExists() is called
    //  many times before the first getPageText() is called,
    //  and the whole page is cached.
    //
    return m_provider.pageExists( pageName );
}
/**
 * {@inheritDoc}
 */
public String getPageText( String pageName, int version )
    throws ProviderException
{
    if( pageName == null )
    {
        return null;
    }
    // The text cache only ever holds the latest version, so it can also be
    // used when the caller asks for the latest version by its number.
    if( version == WikiPageProvider.LATEST_VERSION )
    {
        return getTextFromCache( pageName );
    }
    WikiPage info = getPageInfoFromCache( pageName );
    if( info != null && info.getVersion() == version )
    {
        // The requested version is the latest one; serve it from the cache.
        return getTextFromCache( pageName );
    }
    // An older version is always fetched straight from the real provider.
    return m_provider.getPageText( pageName, version );
}
/**
 * Returns the text of the latest version of a page, preferring the text
 * cache and falling back to the real provider on a miss.
 *
 * @param pageName the page whose text is wanted; may be null
 * @return the page text, or null if the page does not exist
 * @throws ProviderException if the backing provider fails
 */
private String getTextFromCache(String pageName) throws ProviderException {
    if (pageName == null) return null;
    // Warm the page-info cache for this page; the returned value itself is
    // not needed here (the original stored it in an unused local variable).
    getPageInfoFromCache(pageName);
    Element cacheElement = m_textCache.get(pageName);
    if (cacheElement != null) {
        m_cacheHits++;
        return (String) cacheElement.getObjectValue();
    }
    if (pageExists(pageName)) {
        String text = m_provider.getPageText(pageName, WikiPageProvider.LATEST_VERSION);
        m_textCache.put(new Element(pageName, text));
        m_cacheMisses++;
        return text;
    }
    // Page not found (not in cache, not known to the real provider).
    return null;
}
/**
 * {@inheritDoc}
 */
public void putPageText(WikiPage page, String text) throws ProviderException {
    synchronized (this) {
        m_provider.putPageText(page, text);
        page.setLastModified(new Date());
        // Invalidate every cache that may now hold stale data for this page,
        // then eagerly re-fetch the fresh page info.
        String name = page.getName();
        m_cache.remove(name);
        m_textCache.remove(name);
        m_historyCache.remove(name);
        getPageInfoFromCache(name);
    }
}
/**
 * {@inheritDoc}
 *
 * On the first call the full page list is fetched from the real provider and
 * copied into the cache; afterwards the list is served from the cache. If the
 * cache is too small to hold the whole repository, the call is delegated to
 * the real provider instead.
 */
public Collection getAllPages() throws ProviderException {
    Collection all;
    if (!m_gotall) {
        all = m_provider.getAllPages();
        // Make sure that all pages are in the cache.
        synchronized (this) {
            for (Iterator i = all.iterator(); i.hasNext(); ) {
                WikiPage p = (WikiPage) i.next();
                m_cache.put(new Element(p.getName(), p));
            }
            m_gotall = true;
        }
    } else {
        List<String> keys = m_cache.getKeysWithExpiryCheck();
        all = new TreeSet<WikiPage>();
        for (String key : keys) {
            Element element = m_cache.get(key);
            // An entry may expire between listing the keys and fetching it,
            // so guard against a null element as well as a null value.
            if (element == null) {
                continue;
            }
            Object cachedPage = element.getObjectValue();
            if (cachedPage != null) {
                all.add((WikiPage) cachedPage);
            }
        }
    }
    if( all.size() >= m_cache.getCacheConfiguration().getMaxEntriesLocalHeap() ) {
        log.warn( "seems " + m_cache.getName() + " can't hold all pages from your page repository, " +
                  "so we're delegating on the underlying provider instead. Please consider increasing " +
                  "your cache sizes on ehcache.xml to avoid this behaviour" );
        return m_provider.getAllPages();
    }
    return all;
}
/**
 * {@inheritDoc}
 *
 * Change lists are not cached; the call goes straight to the real provider.
 */
public Collection getAllChangedSince( Date date )
{
    return m_provider.getAllChangedSince( date );
}
/**
 * {@inheritDoc}
 *
 * The count always comes from the real provider and is never cached.
 */
public int getPageCount()
    throws ProviderException
{
    return m_provider.getPageCount();
}
/**
 * {@inheritDoc}
 *
 * Search is delegated straight to the real provider; results are not cached.
 */
public Collection findPages( QueryItem[] query )
{
    //
    // If the provider is a fast searcher, then
    // just pass this request through.
    //
    return m_provider.findPages( query );
    // FIXME: Does not implement fast searching
}
//
// FIXME: Kludge: make sure that the page is also parsed so it picks up all
// the necessary variables.
//
private void refreshMetadata( WikiPage page )
{
    if( page == null || page.hasMetadata() )
    {
        return;
    }
    RenderingManager mgr = m_engine.getRenderingManager();
    try
    {
        String data = m_provider.getPageText( page.getName(), page.getVersion() );
        MarkupParser parser = mgr.getParser( new WikiContext( m_engine, page ), data );
        // Parsing is done purely for its side effect of populating metadata.
        parser.parse();
    }
    catch( Exception ex )
    {
        // Best effort: missing metadata is not fatal.
        log.debug("Failed to retrieve variables for wikipage "+page);
    }
}
/**
 * {@inheritDoc}
 */
public WikiPage getPageInfo( String pageName, int version ) throws ProviderException
{
    WikiPage cached = getPageInfoFromCache( pageName );
    int latestcached = (cached != null) ? cached.getVersion() : Integer.MIN_VALUE;
    WikiPage page;
    if( version != WikiPageProvider.LATEST_VERSION && version != latestcached )
    {
        // Old versions are never cached; always go to the real provider.
        page = m_provider.getPageInfo( pageName, version );
    }
    else if( cached != null )
    {
        // The cached entry is exactly the version asked for.
        page = cached;
    }
    else
    {
        // Latest version requested but not cached yet: fetch and remember it.
        page = m_provider.getPageInfo( pageName, version );
        if( page != null )
        {
            m_cache.put( new Element( pageName, page ) );
        }
    }
    refreshMetadata( page );
    return page;
}
/**
 * {@inheritDoc}
 */
public List getVersionHistory(String pageName) throws ProviderException {
    if (pageName == null) {
        return null;
    }
    Element element = m_historyCache.get(pageName);
    if (element != null) {
        m_historyCacheHits++;
        return (List) element.getObjectValue();
    }
    // Miss: fetch from the real provider and cache the whole history list.
    List history = m_provider.getVersionHistory(pageName);
    m_historyCache.put(new Element(pageName, history));
    m_historyCacheMisses++;
    return history;
}
/**
 * Gets the provider class name, and cache statistics (misscount and hitcount
 * of page cache and history cache).
 *
 * @return A plain string with all the above mentioned values.
 */
public synchronized String getProviderInfo()
{
    StringBuilder info = new StringBuilder();
    info.append( "Real provider: " ).append( m_provider.getClass().getName() );
    info.append( ". Cache misses: " ).append( m_cacheMisses );
    info.append( ". Cache hits: " ).append( m_cacheHits );
    info.append( ". History cache hits: " ).append( m_historyCacheHits );
    info.append( ". History cache misses: " ).append( m_historyCacheMisses );
    return info.toString();
}
/**
 * {@inheritDoc}
 */
public void deleteVersion( String pageName, int version )
    throws ProviderException
{
    // Deleting a version is such a rare operation that synchronizing the
    // whole thing is acceptable.
    synchronized( this )
    {
        WikiPage cached = getPageInfoFromCache( pageName );
        int latestcached = (cached != null) ? cached.getVersion() : Integer.MIN_VALUE;
        boolean deletingLatest =
            version == WikiPageProvider.LATEST_VERSION || version == latestcached;
        // Only the latest version is ever held in the page/text caches, so
        // those caches only need flushing when that version is deleted.
        if( deletingLatest )
        {
            m_cache.remove( pageName );
            m_textCache.remove( pageName );
        }
        m_provider.deleteVersion( pageName, version );
        m_historyCache.remove( pageName );
    }
}
/**
 * {@inheritDoc}
 */
public void deletePage( String pageName )
    throws ProviderException
{
    //
    // See note in deleteVersion().
    //
    synchronized(this)
    {
        // Stores explicit null-valued entries ("negative caching") instead of
        // removing the keys, so subsequent lookups see the page as deleted
        // without hitting the real provider.  NOTE(review): this relies on
        // callers treating a cached null value as "not found" (as
        // getPageInfoFromCache() does) -- confirm for the text/history caches.
        m_cache.put(new Element(pageName, null));
        m_textCache.put(new Element( pageName, null ));
        m_historyCache.put(new Element(pageName, null));
        m_provider.deletePage(pageName);
    }
}
/**
 * {@inheritDoc}
 */
public void movePage(String from, String to) throws ProviderException {
    m_provider.movePage(from, to);
    synchronized (this) {
        // Drop every cached artifact for both the source and the destination
        // names, so neither stale copy survives the move.
        m_cache.remove(from);
        m_textCache.remove(from);
        m_historyCache.remove(from);
        log.debug("Removing to page " + to + " from cache");
        m_cache.remove(to);
        m_textCache.remove(to);
        m_historyCache.remove(to);
    }
}
/**
 * Returns the actual used provider.
 *
 * Useful for callers that need to bypass the cache entirely.
 *
 * @since 2.0
 * @return The real provider backing this cache.
 */
public WikiPageProvider getRealProvider()
{
    return m_provider;
}
}
| |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.labservices.v2018_10_15;
import com.microsoft.azure.arm.model.HasInner;
import com.microsoft.azure.management.labservices.v2018_10_15.implementation.GalleryImageInner;
import com.microsoft.azure.arm.model.Indexable;
import com.microsoft.azure.arm.model.Refreshable;
import com.microsoft.azure.arm.model.Updatable;
import com.microsoft.azure.arm.model.Appliable;
import com.microsoft.azure.arm.model.Creatable;
import com.microsoft.azure.arm.resources.models.HasManager;
import com.microsoft.azure.management.labservices.v2018_10_15.implementation.LabServicesManager;
import java.util.Map;
import org.joda.time.DateTime;
/**
 * Type representing GalleryImage.
 *
 * Client-side model of an Azure Lab Services gallery image, together with the
 * fluent definition and update stages used to create and modify it.
 *
 * NOTE: this file is generated by the AutoRest code generator; prefer
 * regenerating over hand-editing.
 */
public interface GalleryImage extends HasInner<GalleryImageInner>, Indexable, Refreshable<GalleryImage>, Updatable<GalleryImage.Update>, HasManager<LabServicesManager> {
    /**
     * @return the author value.
     */
    String author();
    /**
     * @return the createdDate value.
     */
    DateTime createdDate();
    /**
     * @return the description value.
     */
    String description();
    /**
     * @return the icon value.
     */
    String icon();
    /**
     * @return the id value.
     */
    String id();
    /**
     * @return the imageReference value.
     */
    GalleryImageReference imageReference();
    /**
     * @return the isEnabled value.
     */
    Boolean isEnabled();
    /**
     * @return the isOverride value.
     */
    Boolean isOverride();
    /**
     * @return the isPlanAuthorized value.
     */
    Boolean isPlanAuthorized();
    /**
     * @return the latestOperationResult value.
     */
    LatestOperationResult latestOperationResult();
    /**
     * @return the location value.
     */
    String location();
    /**
     * @return the name value.
     */
    String name();
    /**
     * @return the planId value.
     */
    String planId();
    /**
     * @return the provisioningState value.
     */
    String provisioningState();
    /**
     * @return the tags value.
     */
    Map<String, String> tags();
    /**
     * @return the type value.
     */
    String type();
    /**
     * @return the uniqueIdentifier value.
     */
    String uniqueIdentifier();
    /**
     * The entirety of the GalleryImage definition.
     */
    interface Definition extends DefinitionStages.Blank, DefinitionStages.WithLabaccount, DefinitionStages.WithCreate {
    }
    /**
     * Grouping of GalleryImage definition stages.
     */
    interface DefinitionStages {
        /**
         * The first stage of a GalleryImage definition.
         */
        interface Blank extends WithLabaccount {
        }
        /**
         * The stage of the galleryimage definition allowing to specify Labaccount.
         */
        interface WithLabaccount {
            /**
             * Specifies resourceGroupName, labAccountName.
             * @param resourceGroupName The name of the resource group
             * @param labAccountName The name of the lab Account
             * @return the next definition stage
             */
            WithCreate withExistingLabaccount(String resourceGroupName, String labAccountName);
        }
        /**
         * The stage of the galleryimage definition allowing to specify IsEnabled.
         */
        interface WithIsEnabled {
            /**
             * Specifies isEnabled.
             * @param isEnabled Indicates whether this gallery image is enabled
             * @return the next definition stage
             */
            WithCreate withIsEnabled(Boolean isEnabled);
        }
        /**
         * The stage of the galleryimage definition allowing to specify IsOverride.
         */
        interface WithIsOverride {
            /**
             * Specifies isOverride.
             * @param isOverride Indicates whether this gallery has been overridden for this lab account
             * @return the next definition stage
             */
            WithCreate withIsOverride(Boolean isOverride);
        }
        /**
         * The stage of the galleryimage definition allowing to specify IsPlanAuthorized.
         */
        interface WithIsPlanAuthorized {
            /**
             * Specifies isPlanAuthorized.
             * @param isPlanAuthorized Indicates if the plan has been authorized for programmatic deployment
             * @return the next definition stage
             */
            WithCreate withIsPlanAuthorized(Boolean isPlanAuthorized);
        }
        /**
         * The stage of the galleryimage definition allowing to specify Location.
         */
        interface WithLocation {
            /**
             * Specifies location.
             * @param location The location of the resource
             * @return the next definition stage
             */
            WithCreate withLocation(String location);
        }
        /**
         * The stage of the galleryimage definition allowing to specify ProvisioningState.
         */
        interface WithProvisioningState {
            /**
             * Specifies provisioningState.
             * @param provisioningState The provisioning status of the resource
             * @return the next definition stage
             */
            WithCreate withProvisioningState(String provisioningState);
        }
        /**
         * The stage of the galleryimage definition allowing to specify Tags.
         */
        interface WithTags {
            /**
             * Specifies tags.
             * @param tags The tags of the resource
             * @return the next definition stage
             */
            WithCreate withTags(Map<String, String> tags);
        }
        /**
         * The stage of the galleryimage definition allowing to specify UniqueIdentifier.
         */
        interface WithUniqueIdentifier {
            /**
             * Specifies uniqueIdentifier.
             * @param uniqueIdentifier The unique immutable identifier of a resource (Guid)
             * @return the next definition stage
             */
            WithCreate withUniqueIdentifier(String uniqueIdentifier);
        }
        /**
         * The stage of the definition which contains all the minimum required inputs for
         * the resource to be created (via {@link WithCreate#create()}), but also allows
         * for any other optional settings to be specified.
         */
        interface WithCreate extends Creatable<GalleryImage>, DefinitionStages.WithIsEnabled, DefinitionStages.WithIsOverride, DefinitionStages.WithIsPlanAuthorized, DefinitionStages.WithLocation, DefinitionStages.WithProvisioningState, DefinitionStages.WithTags, DefinitionStages.WithUniqueIdentifier {
        }
    }
    /**
     * The template for a GalleryImage update operation, containing all the settings that can be modified.
     */
    interface Update extends Appliable<GalleryImage>, UpdateStages.WithIsEnabled, UpdateStages.WithIsOverride, UpdateStages.WithIsPlanAuthorized, UpdateStages.WithLocation, UpdateStages.WithProvisioningState, UpdateStages.WithTags, UpdateStages.WithUniqueIdentifier {
    }
    /**
     * Grouping of GalleryImage update stages.
     */
    interface UpdateStages {
        /**
         * The stage of the galleryimage update allowing to specify IsEnabled.
         */
        interface WithIsEnabled {
            /**
             * Specifies isEnabled.
             * @param isEnabled Indicates whether this gallery image is enabled
             * @return the next update stage
             */
            Update withIsEnabled(Boolean isEnabled);
        }
        /**
         * The stage of the galleryimage update allowing to specify IsOverride.
         */
        interface WithIsOverride {
            /**
             * Specifies isOverride.
             * @param isOverride Indicates whether this gallery has been overridden for this lab account
             * @return the next update stage
             */
            Update withIsOverride(Boolean isOverride);
        }
        /**
         * The stage of the galleryimage update allowing to specify IsPlanAuthorized.
         */
        interface WithIsPlanAuthorized {
            /**
             * Specifies isPlanAuthorized.
             * @param isPlanAuthorized Indicates if the plan has been authorized for programmatic deployment
             * @return the next update stage
             */
            Update withIsPlanAuthorized(Boolean isPlanAuthorized);
        }
        /**
         * The stage of the galleryimage update allowing to specify Location.
         */
        interface WithLocation {
            /**
             * Specifies location.
             * @param location The location of the resource
             * @return the next update stage
             */
            Update withLocation(String location);
        }
        /**
         * The stage of the galleryimage update allowing to specify ProvisioningState.
         */
        interface WithProvisioningState {
            /**
             * Specifies provisioningState.
             * @param provisioningState The provisioning status of the resource
             * @return the next update stage
             */
            Update withProvisioningState(String provisioningState);
        }
        /**
         * The stage of the galleryimage update allowing to specify Tags.
         */
        interface WithTags {
            /**
             * Specifies tags.
             * @param tags The tags of the resource
             * @return the next update stage
             */
            Update withTags(Map<String, String> tags);
        }
        /**
         * The stage of the galleryimage update allowing to specify UniqueIdentifier.
         */
        interface WithUniqueIdentifier {
            /**
             * Specifies uniqueIdentifier.
             * @param uniqueIdentifier The unique immutable identifier of a resource (Guid)
             * @return the next update stage
             */
            Update withUniqueIdentifier(String uniqueIdentifier);
        }
    }
}
| |
/*
* Copyright (C) 2013 Google Inc.
* Copyright (C) 2013 Square Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dagger.tests.integration.codegen;
import com.google.testing.compile.JavaFileObjects;
import javax.tools.JavaFileObject;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import static com.google.common.truth.Truth.assertAbout;
import static com.google.testing.compile.JavaSourceSubjectFactory.javaSource;
import static com.google.testing.compile.JavaSourcesSubjectFactory.javaSources;
import static dagger.tests.integration.ProcessorTestUtils.daggerProcessors;
import static java.util.Arrays.asList;
/**
 * Compile-testing checks for Dagger's generated module and inject adapters.
 *
 * Each test feeds Java source through the Dagger annotation processors and
 * asserts either the exact generated sources or an expected compile error.
 * The expected-source string literals below must match the generator's output
 * byte-for-byte.
 */
@RunWith(JUnit4.class)
public final class ModuleAdapterGenerationTest {
    /**
     * Shows current behavior for a {@link dagger.Provides provides method}
     * used to supply an injected ctor parameter.
     *
     * <ul>
     * <li>{@code ProvidesAdapter} invokes the module's provides method on
     * {@code get}</li>
     * <li>On {@code getBindings}, the above is newed up and linked to its type
     * key.
     * <li>{@code InjectAdapter} contains a field for the parameter binding,
     * referenced in {@code getDependencies} and set on {@code attach}</li>
     * <li>On {@code get}, the injected constructor is called with the value of
     * {@link dagger.internal.Binding#get}</li>
     * </ul>
     */
    @Test public void providerForCtorInjection() {
        JavaFileObject sourceFile = JavaFileObjects.forSourceString("Field", ""
            + "import dagger.Module;\n"
            + "import dagger.Provides;\n"
            + "import javax.inject.Inject;\n"
            + "class Field {\n"
            + "  static class A { final String name; @Inject A(String name) { this.name = name; }}\n"
            + "  @Module(injects = { A.class, String.class })\n"
            + "  static class AModule { @Provides String name() { return \"foo\"; }}\n"
            + "}\n"
        );
        JavaFileObject expectedModuleAdapter =
            JavaFileObjects.forSourceString("Field$AModule$$ModuleAdapter", ""
                + "import dagger.internal.BindingsGroup;\n"
                + "import dagger.internal.ModuleAdapter;\n"
                + "import dagger.internal.ProvidesBinding;\n"
                + "import java.lang.Class;\n"
                + "import java.lang.Override;\n"
                + "import java.lang.String;\n"
                + "public final class Field$AModule$$ModuleAdapter\n"
                + "    extends ModuleAdapter<Field.AModule> {\n"
                + "  private static final String[] INJECTS = \n"
                + "      {\"members/Field$A\", \"members/java.lang.String\"};\n"
                + "  private static final Class<?>[] STATIC_INJECTIONS = {};\n"
                + "  private static final Class<?>[] INCLUDES = {};\n"
                + "  public Field$AModule$$ModuleAdapter() {\n"
                + "    super(Field.AModule.class, INJECTS, STATIC_INJECTIONS, false, INCLUDES, true, false);\n"
                + "  }\n"
                + "  @Override public Field.AModule newModule() {\n"
                + "    return new Field.AModule();\n"
                + "  }\n"
                + "  @Override public void getBindings(BindingsGroup bindings, Field.AModule module) {\n"
                + "    bindings.contributeProvidesBinding(\"java.lang.String\",\n"
                + "        new NameProvidesAdapter(module));\n" // eager new!
                + "  }\n"
                + "  public static final class NameProvidesAdapter\n" // corresponds to method name
                + "      extends ProvidesBinding<String> {\n"
                + "    private final Field.AModule module;\n"
                + "    public NameProvidesAdapter(Field.AModule module) {\n"
                + "      super(\"java.lang.String\", NOT_SINGLETON, \"Field.AModule\", \"name\");\n"
                + "      this.module = module;\n"
                + "      setLibrary(false);\n"
                + "    }\n"
                + "    @Override public String get() {\n"
                + "      return module.name();\n" // corresponds to @Provides method
                + "    }\n"
                + "  }\n"
                + "}\n"
            );
        JavaFileObject expectedInjectAdapter =
            JavaFileObjects.forSourceString("Field$A$$InjectAdapter", ""
                + "import dagger.internal.Binding;\n"
                + "import dagger.internal.Linker;\n"
                + "import java.lang.Override;\n"
                + "import java.lang.String;\n"
                + "import java.lang.SuppressWarnings;\n"
                + "import java.util.Set;\n"
                + "public final class Field$A$$InjectAdapter\n"
                + "    extends Binding<Field.A> {\n"
                + "  private Binding<String> name;\n" // for ctor
                + "  public Field$A$$InjectAdapter() {\n"
                + "    super(\"Field$A\", \"members/Field$A\", NOT_SINGLETON, Field.A.class);\n"
                + "  }\n"
                + "  @Override @SuppressWarnings(\"unchecked\")\n"
                + "  public void attach(Linker linker) {\n"
                + "    name = (Binding<String>)linker.requestBinding(\n" // binding key is not a class
                + "        \"java.lang.String\", Field.A.class, getClass().getClassLoader());\n"
                + "  }\n"
                + "  @Override public void getDependencies(\n"
                + "      Set<Binding<?>> getBindings, Set<Binding<?>> injectMembersBindings) {\n"
                + "    getBindings.add(name);\n" // name is added to dependencies
                + "  }\n"
                + "  @Override public Field.A get() {\n"
                + "    Field.A result = new Field.A(name.get());\n" // adds ctor param
                + "    return result;\n"
                + "  }\n"
                + "}\n"
            );
        assertAbout(javaSource())
            .that(sourceFile)
            .processedWith(daggerProcessors())
            .compilesWithoutError()
            .and()
            .generatesSources(expectedModuleAdapter, expectedInjectAdapter);
    }
    // Same as above, but additionally covers a members-injected type (B):
    // its adapter implements injectMembers() and registers the binding in
    // injectMembersBindings rather than getBindings.
    @Test public void injectsMembersInjectedAndProvidedAndConstructedTypes() {
        JavaFileObject sourceFile = JavaFileObjects.forSourceString("Field", ""
            + "import dagger.Module;\n"
            + "import dagger.Provides;\n"
            + "import javax.inject.Inject;\n"
            + "class Field {\n"
            + "  static class A { final String name; @Inject A(String name) { this.name = name; }}\n"
            + "  static class B { @Inject String name; }\n"
            + "  @Module(injects = { A.class, String.class, B.class })\n"
            + "  static class AModule { @Provides String name() { return \"foo\"; }}\n"
            + "}\n"
        );
        JavaFileObject expectedModuleAdapter =
            JavaFileObjects.forSourceString("Field$AModule$$ModuleAdapter", ""
                + "import dagger.internal.BindingsGroup;\n"
                + "import dagger.internal.ModuleAdapter;\n"
                + "import dagger.internal.ProvidesBinding;\n"
                + "import java.lang.Class;\n"
                + "import java.lang.Override;\n"
                + "import java.lang.String;\n"
                + "public final class Field$AModule$$ModuleAdapter extends ModuleAdapter<Field.AModule> {\n"
                + "  private static final String[] INJECTS = \n"
                + "      {\"members/Field$A\", \"members/java.lang.String\", \"members/Field$B\"};\n"
                + "  private static final Class<?>[] STATIC_INJECTIONS = {};\n"
                + "  private static final Class<?>[] INCLUDES = {};\n"
                + "  public Field$AModule$$ModuleAdapter() {\n"
                + "    super(Field.AModule.class, INJECTS, STATIC_INJECTIONS, false, INCLUDES, true, false);\n"
                + "  }\n"
                + "  @Override public Field.AModule newModule() {\n"
                + "    return new Field.AModule();\n"
                + "  }\n"
                + "  @Override public void getBindings(BindingsGroup bindings, Field.AModule module) {\n"
                + "    bindings.contributeProvidesBinding(\"java.lang.String\",\n"
                + "        new NameProvidesAdapter(module));\n" // eager new!
                + "  }\n"
                + "  public static final class NameProvidesAdapter\n" // corresponds to method name
                + "      extends ProvidesBinding<String> {\n"
                + "    private final Field.AModule module;\n"
                + "    public NameProvidesAdapter(Field.AModule module) {\n"
                + "      super(\"java.lang.String\", NOT_SINGLETON, \"Field.AModule\", \"name\");\n"
                + "      this.module = module;\n"
                + "      setLibrary(false);\n"
                + "    }\n"
                + "    @Override public String get() {\n"
                + "      return module.name();\n" // corresponds to @Provides method
                + "    }\n"
                + "  }\n"
                + "}\n"
            );
        JavaFileObject expectedInjectAdapterA =
            JavaFileObjects.forSourceString("Field$A$$InjectAdapter", ""
                + "import dagger.internal.Binding;\n"
                + "import dagger.internal.Linker;\n"
                + "import java.lang.Override;\n"
                + "import java.lang.String;\n"
                + "import java.lang.SuppressWarnings;\n"
                + "import java.util.Set;\n"
                + "public final class Field$A$$InjectAdapter\n"
                + "    extends Binding<Field.A> {\n"
                + "  private Binding<String> name;\n" // For Constructor.
                + "  public Field$A$$InjectAdapter() {\n"
                + "    super(\"Field$A\", \"members/Field$A\", NOT_SINGLETON, Field.A.class);\n"
                + "  }\n"
                + "  @Override @SuppressWarnings(\"unchecked\")\n"
                + "  public void attach(Linker linker) {\n"
                + "    name = (Binding<String>)linker.requestBinding(\n"
                + "        \"java.lang.String\", Field.A.class, getClass().getClassLoader());\n"
                + "  }\n"
                + "  @Override public void getDependencies(\n"
                + "      Set<Binding<?>> getBindings, Set<Binding<?>> injectMembersBindings) {\n"
                + "    getBindings.add(name);\n" // Name is added to dependencies.
                + "  }\n"
                + "  @Override public Field.A get() {\n"
                + "    Field.A result = new Field.A(name.get());\n" // Adds constructor parameter.
                + "    return result;\n"
                + "  }\n"
                + "}\n"
            );
        JavaFileObject expectedInjectAdapterB =
            JavaFileObjects.forSourceString("Field$B$$InjectAdapter", ""
                + "import dagger.internal.Binding;\n"
                + "import dagger.internal.Linker;\n"
                + "import java.lang.Override;\n"
                + "import java.lang.String;\n"
                + "import java.lang.SuppressWarnings;\n"
                + "import java.util.Set;\n"
                + "public final class Field$B$$InjectAdapter\n"
                + "    extends Binding<Field.B> {\n"
                + "  private Binding<String> name;\n" // For field.
                + "  public Field$B$$InjectAdapter() {\n"
                + "    super(\"Field$B\", \"members/Field$B\", NOT_SINGLETON, Field.B.class);\n"
                + "  }\n"
                + "  @Override @SuppressWarnings(\"unchecked\")\n"
                + "  public void attach(Linker linker) {\n"
                + "    name = (Binding<String>)linker.requestBinding(\n"
                + "        \"java.lang.String\", Field.B.class, getClass().getClassLoader());\n"
                + "  }\n"
                + "  @Override public void getDependencies(\n"
                + "      Set<Binding<?>> getBindings, Set<Binding<?>> injectMembersBindings) {\n"
                + "    injectMembersBindings.add(name);\n" // Name is added to dependencies.
                + "  }\n"
                + "  @Override public Field.B get() {\n"
                + "    Field.B result = new Field.B();\n"
                + "    injectMembers(result);\n"
                + "    return result;\n"
                + "  }\n"
                + "  @Override public void injectMembers(Field.B object) {\n"
                + "    object.name = name.get();\n" // Inject field.
                + "  }\n"
                + "}\n"
            );
        assertAbout(javaSource())
            .that(sourceFile)
            .processedWith(daggerProcessors())
            .compilesWithoutError()
            .and()
            .generatesSources(expectedModuleAdapter, expectedInjectAdapterA, expectedInjectAdapterB);
    }
    // Regression check: a @Provides method parameter named "module" must not
    // clash with the generated adapter's own "module" field.
    @Test public void providesHasParameterNamedModule() {
        JavaFileObject a = JavaFileObjects.forSourceString("A", ""
            + "import javax.inject.Inject;\n"
            + "class A {\n"
            + "  @Inject A(){ }\n"
            + "}\n"
        );
        JavaFileObject b = JavaFileObjects.forSourceString("B", ""
            + "import javax.inject.Inject;\n"
            + "class B {\n"
            + "  @Inject B(){ }\n"
            + "}\n"
        );
        JavaFileObject module = JavaFileObjects.forSourceString("BModule", ""
            + "import dagger.Module;\n"
            + "import dagger.Provides;\n"
            + "import javax.inject.Inject;\n"
            + "@Module(injects = B.class)\n"
            + "class BModule {\n"
            + "  @Provides B b(A module) {\n"
            + "    return new B();\n"
            + "  }\n"
            + "}\n"
        );
        assertAbout(javaSources())
            .that(asList(a, b, module))
            .processedWith(daggerProcessors())
            .compilesWithoutError();
    }
    // Duplicate entries in @Module(injects = ...) must be rejected at the
    // annotation's source line.
    @Test public void duplicateInjectsFails() {
        JavaFileObject module = JavaFileObjects.forSourceString("Test", ""
            + "import dagger.Module;\n"
            + "import dagger.Provides;\n"
            + "import javax.inject.Inject;\n"
            + "class A {}\n"
            + "@Module(injects = { A.class, A.class })\n"
            + "class BModule { }\n"
        );
        assertAbout(javaSource())
            .that(module)
            .processedWith(daggerProcessors())
            .failsToCompile()
            .withErrorContaining("'injects' list contains duplicate entries: [A]")
            .in(module).onLine(6);
    }
    // Duplicate entries in @Module(includes = ...) must likewise be rejected.
    @Test public void duplicateIncludesFails() {
        JavaFileObject module = JavaFileObjects.forSourceString("Test", ""
            + "import dagger.Module;\n"
            + "import dagger.Provides;\n"
            + "import javax.inject.Inject;\n"
            + "@Module\n"
            + "class AModule {}\n"
            + "@Module(includes = { AModule.class, AModule.class })\n"
            + "class BModule { }\n"
        );
        assertAbout(javaSource())
            .that(module)
            .processedWith(daggerProcessors())
            .failsToCompile()
            .withErrorContaining("'includes' list contains duplicate entries: [AModule]")
            .in(module).onLine(7);
    }
}
| |
package com.gmail.jacquesmit97.librarysystem.gui;
import com.gmail.jacquesmit97.librarysystem.Book;
import com.gmail.jacquesmit97.librarysystem.Student;
import com.gmail.jacquesmit97.librarysystem.Transaction;
import javax.swing.*;
import java.awt.*;
import java.awt.event.*;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.List;
/**
* Allows the user to create {@link Transaction} object with the information that is provided.
*/
public class TransactionCreator extends JDialog implements TransactionContract.View {
private JPanel contentPane;
private JButton buttonOK;
private JButton buttonCancel;
private JLabel referenceNumberField;
private JLabel creationDateField;
private JComboBox studentCombo;
private JTextField studentNameInput;
private JTextField titleInput;
private JTextField surnameInput;
private JTextField cellInput;
private JTextField addressInput;
private JButton newStudentButton;
private JList booksAvailable;
private JList booksInTransaction;
private JButton addBook;
private JButton removeBook;
private JButton removeAllBooks;
private JTextField bookIsbn;
private JTextField bookTitle;
private JTextField bookAuthor;
private JTextField bookPublishedDate;
private JTextField bookWrittenYear;
private JTextField bookEdition;
private JTextField bookCategory;
private JTextField bookPublisher;
private JTextField bookAvailableCopies;
private TransactionContract.Presenter presenter;
/**
* The transaction this dialog will represent.
*/
private Transaction transaction;
/**
* All the students in the database.
*/
private List<Student> students;
/**
* All the books in the database.
*/
private List<Book> books;
/**
 * Creates a new <code>TransactionCreator</code> dialog bound to the given <code>parent</code>.
 *
 * Blocks (modal) until the user confirms or cancels.
 *
 * @param parent    The parent of this dialog.
 * @param presenter Supplies the students/books and receives this view.
 */
public TransactionCreator(Frame parent, TransactionContract.Presenter presenter) {
    super(parent, "New Transaction", true);
    setContentPane(contentPane);
    getRootPane().setDefaultButton(buttonOK);
    // Initialize components and update components that cannot be changed
    this.presenter = presenter;
    presenter.setView(this);
    // "yyyy" is the calendar year; the original pattern used "YYYY", the ISO
    // week-year, which prints the wrong year for dates around New Year.
    SimpleDateFormat sdf = new SimpleDateFormat("yyyy/MM/dd");
    transaction = new Transaction();
    referenceNumberField.setText("Reference number: " + transaction.getReferenceNumber());
    creationDateField.setText("Creation date: " + sdf.format(transaction.getBookingDate()));
    this.students = presenter.getStudents();
    this.books = presenter.getBooks();
    update();
    updateAvailableBooks(books);
    updateBooksInTransaction();
    ////////////////////
    buttonOK.addActionListener(e -> onOK());
    buttonCancel.addActionListener(e -> onCancel());
    // call onCancel() when cross is clicked
    setDefaultCloseOperation(DO_NOTHING_ON_CLOSE);
    addWindowListener(new WindowAdapter() {
        public void windowClosing(WindowEvent e) {
            onCancel();
        }
    });
    // call onCancel() on ESCAPE
    contentPane.registerKeyboardAction(new ActionListener() {
        public void actionPerformed(ActionEvent e) {
            onCancel();
        }
    }, KeyStroke.getKeyStroke(KeyEvent.VK_ESCAPE, 0), JComponent.WHEN_ANCESTOR_OF_FOCUSED_COMPONENT);
    studentCombo.addActionListener(new ActionListener() {
        @Override
        public void actionPerformed(ActionEvent e) {
            // getSelectedIndex() is -1 when the selection is cleared.
            int index = studentCombo.getSelectedIndex();
            if (index < 0 || index >= students.size()) {
                return;
            }
            Student student = students.get(index);
            updateStudentInformation(student);
            transaction.setStudent(student);
        }
    });
    addBook.addActionListener(new ActionListener() {
        @Override
        public void actionPerformed(ActionEvent e) {
            try {
                Book book = books.get(booksAvailable.getSelectedIndex());
                addBookToTransaction(book);
            // Catch the supertype: List.get throws plain IndexOutOfBoundsException
            // for the -1 "no selection" index, which the original
            // ArrayIndexOutOfBoundsException handler would miss.
            } catch (IndexOutOfBoundsException ignored) {
                JOptionPane.showMessageDialog(TransactionCreator.this,
                        "Please select a book before adding to the transaction",
                        "No book selected",
                        JOptionPane.WARNING_MESSAGE);
            }
        }
    });
    removeBook.addActionListener(new ActionListener() {
        @Override
        public void actionPerformed(ActionEvent e) {
            try {
                Book book = transaction.getBooks().get(booksInTransaction.getSelectedIndex());
                removeBookFromTransaction(book);
            } catch (IndexOutOfBoundsException ignored) {
                JOptionPane.showMessageDialog(TransactionCreator.this,
                        "Please select a book before removing from the transaction",
                        "No book selected",
                        JOptionPane.WARNING_MESSAGE);
            }
        }
    });
    booksAvailable.addMouseListener(new MouseAdapter() {
        @Override
        public void mouseClicked(MouseEvent e) {
            try {
                Book selectedBook = books.get(booksAvailable.getSelectedIndex());
                showBookInformation(selectedBook);
            } catch (IndexOutOfBoundsException ignored) {
                // Nothing selected; leave the detail fields untouched.
            }
        }
    });
    booksInTransaction.addMouseListener(new MouseAdapter() {
        @Override
        public void mouseClicked(MouseEvent e) {
            try {
                Book selectedBook = transaction.getBooks().get(booksInTransaction.getSelectedIndex());
                showBookInformation(selectedBook);
            } catch (IndexOutOfBoundsException ignored) {
                // Nothing selected; leave the detail fields untouched.
            }
        }
    });
    removeAllBooks.addActionListener(new ActionListener() {
        @Override
        public void actionPerformed(ActionEvent e) {
            removeAllBooksFromTransaction();
        }
    });
    // Guard against an empty student list / no selection, which would
    // otherwise throw while the dialog is still being constructed.
    int selected = studentCombo.getSelectedIndex();
    if (selected >= 0 && selected < students.size()) {
        updateStudentInformation(students.get(selected));
    }
    pack();
    setLocationRelativeTo(parent);
    setVisible(true);
}
/**
* Shows the information about the given book.
*
* @param book The book to show information about.
*/
private void showBookInformation(Book book) {
bookIsbn.setText(book.getIsbn());
bookAuthor.setText(book.getAuthor());
bookAvailableCopies.setText(Integer.toString(book.getCopiesInLibrary()));
bookCategory.setText(book.getCategory());
bookPublishedDate.setText(book.getYearPublished());
bookWrittenYear.setText(book.getYearWritten());
bookEdition.setText(book.getEdition());
bookPublisher.setText(book.getPublisher());
bookTitle.setText(book.getTitle());
}
/**
* Updates the GUI information the user can see.
*
* @param student The student whose information is to be displayed.
*/
private void updateStudentInformation(Student student) {
studentNameInput.setText(student.getName());
surnameInput.setText(student.getSurname());
titleInput.setText(student.getTitle());
addressInput.setText(student.getAddress());
cellInput.setText(student.getCell());
}
private void updateAvailableBooks(List<Book> books) {
booksAvailable.removeAll();
ListModel<String> listModel = new AbstractListModel<String>() {
@Override
public int getSize() {
return books.size();
}
@Override
public String getElementAt(int index) {
return books.get(index).getIsbn();
}
};
booksAvailable.setModel(listModel);
booksAvailable.revalidate();
booksAvailable.setBackground(Color.WHITE);
booksAvailable.repaint();
}
/**
* Updates the {@link JList} that shows all the books that is currently added to the transaction.
*/
private void updateBooksInTransaction() {
List<Book> books = transaction.getBooks();
ListModel<String> listModel = new AbstractListModel<String>() {
@Override
public int getSize() {
return books.size();
}
@Override
public String getElementAt(int index) {
return books.get(index).getIsbn();
}
};
booksInTransaction.setModel(listModel);
booksInTransaction.revalidate();
booksInTransaction.setBackground(Color.WHITE);
booksInTransaction.repaint();
}
/**
* Adds the given book to the transaction.
*
* @param book The book to add to the transaction.
*/
private void addBookToTransaction(Book book) {
transaction.addBook(book);
books.remove(book);
updateAvailableBooks(books);
updateBooksInTransaction();
}
/**
* Removes the given book from the transaction.
*
* @param book The book to remove from the transaction.
*/
private void removeBookFromTransaction(Book book) {
books.add(book);
transaction.removeBook(book);
updateAvailableBooks(books);
updateBooksInTransaction();
}
/**
* Removes all the books from the transaction.
*/
private void removeAllBooksFromTransaction() {
List<Book> transactionBooks = transaction.getBooks();
books.addAll(transactionBooks);
transaction.setBooks(new ArrayList<>());
updateAvailableBooks(books);
updateBooksInTransaction();
}
    /**
     * Repopulates the student combo box with the student numbers of all
     * known students. Note: despite the field name plural in the original
     * comment, only the student selector is refreshed here — the book lists
     * are updated by {@code updateAvailableBooks}/{@code updateBooksInTransaction}.
     */
    private void update() {
        studentCombo.removeAllItems();
        if (students != null) {
            for (Student student : students) {
                studentCombo.addItem(student.getStudentNumber());
            }
        }
    }
    /**
     * Returns the current {@link Transaction} object represented by this
     * dialog, or {@code null} if the dialog was cancelled (see
     * {@code onCancel()}, which clears the field).
     *
     * @return The transaction object represented by this dialog, or {@code null}.
     */
    @Override
    public Transaction getTransaction() {
        return transaction;
    }
    /**
     * Confirms the dialog: the transaction built so far is kept and the
     * window is closed. Callers read the result via {@code getTransaction()}.
     */
    private void onOK() {
        // The transaction field already reflects the user's edits; just close.
        dispose();
    }
    /**
     * Cancels the dialog: the in-progress transaction is discarded so that
     * {@code getTransaction()} returns {@code null} to the caller.
     */
    private void onCancel() {
        transaction = null; // signals cancellation to the caller
        dispose();
    }
}
| |
/*
Copyright 2015 Actian Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.actian.services.dataflow.operators;
import static com.pervasive.datarush.types.TokenTypeConstant.STRING;
import static com.pervasive.datarush.types.TokenTypeConstant.record;
import java.io.StringReader;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;
import com.actian.services.dataflow.functions.evaluators.XPath;
import com.pervasive.datarush.DRException;
import com.pervasive.datarush.annotations.OperatorDescription;
import com.pervasive.datarush.annotations.PortDescription;
import com.pervasive.datarush.annotations.PropertyDescription;
import com.pervasive.datarush.operators.ExecutableOperator;
import com.pervasive.datarush.operators.ExecutionContext;
import com.pervasive.datarush.operators.RecordPipelineOperator;
import com.pervasive.datarush.operators.StreamingMetadataContext;
import com.pervasive.datarush.ports.physical.RecordInput;
import com.pervasive.datarush.ports.physical.RecordOutput;
import com.pervasive.datarush.ports.physical.StringInputField;
import com.pervasive.datarush.ports.record.RecordPort;
import com.pervasive.datarush.schema.RecordTextSchema;
import com.pervasive.datarush.tokens.scalar.StringToken;
import com.pervasive.datarush.types.RecordTokenType;
import com.pervasive.datarush.types.TokenTypeConstant;
import com.pervasive.datarush.types.TypeUtil;
import java.io.IOException;
import java.util.Iterator;
import javax.xml.XMLConstants;
import javax.xml.namespace.NamespaceContext;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import org.codehaus.jackson.annotate.JsonAutoDetect;
import org.codehaus.jackson.annotate.JsonMethod;
import org.codehaus.jackson.annotate.JsonProperty;
import org.w3c.dom.Document;
import org.xml.sax.SAXException;
@JsonAutoDetect(JsonMethod.NONE)
@OperatorDescription("Parse and extract data from XML input.")
public class XPathTable extends ExecutableOperator implements RecordPipelineOperator {
private static final String CHILD_XML = "_childXML_";
private static final String NODE_XML = "_nodeXML_";
private static final String SOURCE_XML = "_sourceXML_";
private final RecordPort input = newRecordInput("input");
private final RecordPort output = newRecordOutput("output");
private RecordTextSchema<?> schema;
private String expression;
private boolean includeChildXML = false;
private boolean includeNodeXML = false;
private boolean includeSourceXML = true;
private String inputField;
private XPathFactory xpathFactory;
private DocumentBuilderFactory documentBuilderFactory;
public XPathTable() {
Thread t = Thread.currentThread();
ClassLoader cl = t.getContextClassLoader();
t.setContextClassLoader(getClass().getClassLoader());
try {
xpathFactory = XPathFactory.newInstance();
documentBuilderFactory = DocumentBuilderFactory.newInstance();
} finally {
t.setContextClassLoader(cl);
}
}
@Override
protected void execute(ExecutionContext ctx) {
RecordInput inputRec = (RecordInput) ctx.getInputPort(getInput());
RecordOutput outputRec = (RecordOutput) ctx.getOutputPort(getOutput());
while (inputRec.stepNext()) {
javax.xml.xpath.XPath xpath = xpathFactory.newInstance().newXPath();
InputSource source = new InputSource(new StringReader(((StringInputField) inputRec.getField(inputField)).asString()));
try {
DocumentBuilder db = documentBuilderFactory.newDocumentBuilder();
Document dom = db.parse(source);
xpath.setNamespaceContext(new UniversalNamespaceResolver(dom));
NodeList nodes = (NodeList) xpath.evaluate(expression, dom, XPathConstants.NODESET);
for (int i = 0; i < nodes.getLength(); i++) {
// Output attribute values, and text value.
Node n = nodes.item(i);
// Loop through the schema
for (String fieldName : schema.getFieldNames()) {
outputRec.getField(fieldName).setNull();
if (n.hasAttributes() && fieldName.startsWith("@")) {
// Test for attribute with the name.
Node attr = n.getAttributes().getNamedItem(fieldName.substring(1));
if (attr != null) {
outputRec.getField(fieldName).set(new StringToken(attr.getNodeValue()));
}
} else if (n.hasChildNodes()){
Node child = n.getFirstChild();
while(child != null){
if (child.getNodeName().equals(fieldName)) {
outputRec.getField(fieldName).set(StringToken.parse(child.getTextContent()));
break;
}
child = child.getNextSibling();
}
}
}
if (isIncludeChildXML()) {
if (n.hasChildNodes()) {// Output child XML
NodeList list = n.getChildNodes();
String xmlValue = "";
for (int j = 0; j < list.getLength(); j++) {
xmlValue += XPath.printNode(list.item(j));
}
outputRec.getField(CHILD_XML).set(new StringToken(xmlValue));
} else {
outputRec.getField(CHILD_XML).setNull();
}
}
if (isIncludeNodeXML()) {
outputRec.getField(NODE_XML).set(new StringToken(XPath.printNode(n)));
}
if (isIncludeSourceXML()) {
outputRec.getField(SOURCE_XML).set(inputRec.getField(inputField));
}
outputRec.push();
}
} catch (XPathExpressionException ex) {
throw new DRException("Error executing expression.",ex);
} catch (ParserConfigurationException ex) {
throw new DRException("Error creating document builder.", ex);
} catch (SAXException ex) {
throw new DRException("Error parsing XML source.", ex);
} catch (IOException ex) {
throw new DRException("Error reading XML source.", ex);
}
}
outputRec.pushEndOfData();
}
@Override
protected void computeMetadata(StreamingMetadataContext ctx) {
RecordTokenType s = this.schema.getTokenType();
if (inputField == null || inputField.isEmpty()) {
throw new DRException("Please specify an input field.");
}
if (input.getType(ctx).get(inputField) == null) {
throw new DRException("Input does not contain a field named '" + inputField + "'");
}
if (!input.getType(ctx).get(inputField).getType().equals(TokenTypeConstant.STRING)) {
throw new DRException("Field '" + inputField + "' has to be a string field.");
}
if (isIncludeChildXML()) {
s = TypeUtil.merge(s, record(STRING(CHILD_XML)));
}
if (isIncludeNodeXML()) {
s = TypeUtil.merge(s, record(STRING(NODE_XML)));
}
if (isIncludeSourceXML()) {
s = TypeUtil.merge(s, record(STRING(SOURCE_XML)));
}
if (s.isEmpty()) {
throw new DRException("No fields in output!");
}
output.setType(ctx, s);
}
@PortDescription("XML data.")
public RecordPort getInput() {
return input;
}
@PortDescription("Parsed XML data.")
public RecordPort getOutput() {
return output;
}
@JsonProperty
@PropertyDescription("Record schema of the target data")
public RecordTextSchema<?> getSchema() {
return schema;
}
public void setSchema(RecordTextSchema<?> schema) {
this.schema = schema;
}
@JsonProperty
@PropertyDescription("XPath expression")
public String getExpression() {
return expression;
}
public void setExpression(String expression) {
this.expression = expression;
}
@JsonProperty("includeChildXML")
@PropertyDescription("Output child XML")
public boolean isIncludeChildXML() {
return includeChildXML;
}
@JsonProperty
public void setIncludeChildXML(boolean includeChildXML) {
this.includeChildXML = includeChildXML;
}
@JsonProperty("includeNodeXML")
@PropertyDescription("Output node XML")
public boolean isIncludeNodeXML() {
return includeNodeXML;
}
@JsonProperty("includeNodeXML")
public void setIncludeNodeXML(boolean includeNodeXML) {
this.includeNodeXML = includeNodeXML;
}
@JsonProperty("includeSourceXML")
@PropertyDescription("Output source XML")
public boolean isIncludeSourceXML() {
return includeSourceXML;
}
@JsonProperty("includeSourceXML")
public void setIncludeSourceXML(boolean includeSourceXML) {
this.includeSourceXML = includeSourceXML;
}
@JsonProperty
@PropertyDescription("Input Field containing the XML")
public String getInputField() {
return inputField;
}
public void setInputField(String inputField) {
this.inputField = inputField;
}
// The following is from the article "Using the Java language NamespaceContext object with XPath"
// http://www.ibm.com/developerworks/library/x-nmspccontext/
private final static class UniversalNamespaceResolver implements NamespaceContext {
private Document sourceDocument;
public UniversalNamespaceResolver(Document document) {
sourceDocument = document;
}
@Override
public String getNamespaceURI(String prefix) {
if (prefix.equals(XMLConstants.DEFAULT_NS_PREFIX)) {
return sourceDocument.lookupNamespaceURI(null);
} else {
return sourceDocument.lookupNamespaceURI(prefix);
}
}
@Override
public String getPrefix(String namespaceURI) {
return sourceDocument.lookupPrefix(namespaceURI);
}
@Override
public Iterator<?> getPrefixes(String namespaceURI) {
return null;
}
}
}
| |
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.8-b130911.1802
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2019.01.11 at 02:39:34 PM EST
//
package schemas.docbook;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlID;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.adapters.CollapsedStringAdapter;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
/**
* <p>Java class for anonymous complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element ref="{http://docbook.org/ns/docbook}info" minOccurs="0"/>
* </sequence>
* <attGroup ref="{http://docbook.org/ns/docbook}db.common.attributes"/>
* <attribute name="role" type="{http://www.w3.org/2001/XMLSchema}anySimpleType" />
* <attribute name="format" type="{http://www.w3.org/2001/XMLSchema}anySimpleType" />
* <attribute name="fileref" type="{http://www.w3.org/2001/XMLSchema}anySimpleType" />
* <attribute name="entityref" type="{http://www.w3.org/2001/XMLSchema}ENTITY" />
* <attribute name="align">
* <simpleType>
* <restriction base="{http://www.w3.org/2001/XMLSchema}token">
* <enumeration value="center"/>
* <enumeration value="char"/>
* <enumeration value="justify"/>
* <enumeration value="left"/>
* <enumeration value="right"/>
* </restriction>
* </simpleType>
* </attribute>
* <attribute name="valign">
* <simpleType>
* <restriction base="{http://www.w3.org/2001/XMLSchema}token">
* <enumeration value="bottom"/>
* <enumeration value="middle"/>
* <enumeration value="top"/>
* </restriction>
* </simpleType>
* </attribute>
* <attribute name="width" type="{http://www.w3.org/2001/XMLSchema}anySimpleType" />
* <attribute name="contentwidth" type="{http://www.w3.org/2001/XMLSchema}anySimpleType" />
* <attribute name="scalefit">
* <simpleType>
* <restriction base="{http://www.w3.org/2001/XMLSchema}token">
* <enumeration value="0"/>
* <enumeration value="1"/>
* </restriction>
* </simpleType>
* </attribute>
* <attribute name="scale" type="{http://www.w3.org/2001/XMLSchema}NMTOKEN" />
* <attribute name="depth" type="{http://www.w3.org/2001/XMLSchema}anySimpleType" />
* <attribute name="contentdepth" type="{http://www.w3.org/2001/XMLSchema}anySimpleType" />
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "info"
})
@XmlRootElement(name = "imagedata")
public class Imagedata {

    protected Info info;
    @XmlAttribute(name = "role")
    @XmlSchemaType(name = "anySimpleType")
    protected String role;
    @XmlAttribute(name = "format")
    @XmlSchemaType(name = "anySimpleType")
    protected String format;
    @XmlAttribute(name = "fileref")
    @XmlSchemaType(name = "anySimpleType")
    protected String fileref;
    @XmlAttribute(name = "entityref")
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    @XmlSchemaType(name = "ENTITY")
    protected String entityref;
    @XmlAttribute(name = "align")
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    protected String align;
    @XmlAttribute(name = "valign")
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    protected String valign;
    @XmlAttribute(name = "width")
    @XmlSchemaType(name = "anySimpleType")
    protected String width;
    @XmlAttribute(name = "contentwidth")
    @XmlSchemaType(name = "anySimpleType")
    protected String contentwidth;
    @XmlAttribute(name = "scalefit")
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    protected String scalefit;
    @XmlAttribute(name = "scale")
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    @XmlSchemaType(name = "NMTOKEN")
    protected String scale;
    @XmlAttribute(name = "depth")
    @XmlSchemaType(name = "anySimpleType")
    protected String depth;
    @XmlAttribute(name = "contentdepth")
    @XmlSchemaType(name = "anySimpleType")
    protected String contentdepth;
    @XmlAttribute(name = "id", namespace = "http://www.w3.org/XML/1998/namespace")
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    @XmlID
    @XmlSchemaType(name = "ID")
    protected String id;
    @XmlAttribute(name = "version")
    @XmlSchemaType(name = "anySimpleType")
    protected String commonVersion;
    @XmlAttribute(name = "lang", namespace = "http://www.w3.org/XML/1998/namespace")
    @XmlSchemaType(name = "anySimpleType")
    protected String xmlLang;
    @XmlAttribute(name = "base", namespace = "http://www.w3.org/XML/1998/namespace")
    @XmlSchemaType(name = "anySimpleType")
    protected String base;
    @XmlAttribute(name = "remap")
    @XmlSchemaType(name = "anySimpleType")
    protected String remap;
    @XmlAttribute(name = "xreflabel")
    @XmlSchemaType(name = "anySimpleType")
    protected String xreflabel;
    @XmlAttribute(name = "revisionflag")
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    protected String revisionflag;
    @XmlAttribute(name = "dir")
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    protected String dir;
    @XmlAttribute(name = "arch")
    @XmlSchemaType(name = "anySimpleType")
    protected String arch;
    @XmlAttribute(name = "audience")
    @XmlSchemaType(name = "anySimpleType")
    protected String audience;
    @XmlAttribute(name = "condition")
    @XmlSchemaType(name = "anySimpleType")
    protected String condition;
    @XmlAttribute(name = "conformance")
    @XmlSchemaType(name = "anySimpleType")
    protected String conformance;
    @XmlAttribute(name = "os")
    @XmlSchemaType(name = "anySimpleType")
    protected String os;
    @XmlAttribute(name = "revision")
    @XmlSchemaType(name = "anySimpleType")
    protected String commonRevision;
    @XmlAttribute(name = "security")
    @XmlSchemaType(name = "anySimpleType")
    protected String security;
    @XmlAttribute(name = "userlevel")
    @XmlSchemaType(name = "anySimpleType")
    protected String userlevel;
    @XmlAttribute(name = "vendor")
    @XmlSchemaType(name = "anySimpleType")
    protected String vendor;
    @XmlAttribute(name = "wordsize")
    @XmlSchemaType(name = "anySimpleType")
    protected String wordsize;
    @XmlAttribute(name = "annotations")
    @XmlSchemaType(name = "anySimpleType")
    protected String annotations;

    /** @return the {@code info} child element, or {@code null} if absent */
    public Info getInfo() {
        return this.info;
    }

    /** @param value the {@code info} child element */
    public void setInfo(Info value) {
        this.info = value;
    }

    /** @return the {@code role} attribute, or {@code null} if unset */
    public String getRole() {
        return this.role;
    }

    /** @param value the {@code role} attribute */
    public void setRole(String value) {
        this.role = value;
    }

    /** @return the {@code format} attribute, or {@code null} if unset */
    public String getFormat() {
        return this.format;
    }

    /** @param value the {@code format} attribute */
    public void setFormat(String value) {
        this.format = value;
    }

    /** @return the {@code fileref} attribute, or {@code null} if unset */
    public String getFileref() {
        return this.fileref;
    }

    /** @param value the {@code fileref} attribute */
    public void setFileref(String value) {
        this.fileref = value;
    }

    /** @return the {@code entityref} attribute, or {@code null} if unset */
    public String getEntityref() {
        return this.entityref;
    }

    /** @param value the {@code entityref} attribute */
    public void setEntityref(String value) {
        this.entityref = value;
    }

    /** @return the {@code align} attribute, or {@code null} if unset */
    public String getAlign() {
        return this.align;
    }

    /** @param value the {@code align} attribute */
    public void setAlign(String value) {
        this.align = value;
    }

    /** @return the {@code valign} attribute, or {@code null} if unset */
    public String getValign() {
        return this.valign;
    }

    /** @param value the {@code valign} attribute */
    public void setValign(String value) {
        this.valign = value;
    }

    /** @return the {@code width} attribute, or {@code null} if unset */
    public String getWidth() {
        return this.width;
    }

    /** @param value the {@code width} attribute */
    public void setWidth(String value) {
        this.width = value;
    }

    /** @return the {@code contentwidth} attribute, or {@code null} if unset */
    public String getContentwidth() {
        return this.contentwidth;
    }

    /** @param value the {@code contentwidth} attribute */
    public void setContentwidth(String value) {
        this.contentwidth = value;
    }

    /** @return the {@code scalefit} attribute, or {@code null} if unset */
    public String getScalefit() {
        return this.scalefit;
    }

    /** @param value the {@code scalefit} attribute */
    public void setScalefit(String value) {
        this.scalefit = value;
    }

    /** @return the {@code scale} attribute, or {@code null} if unset */
    public String getScale() {
        return this.scale;
    }

    /** @param value the {@code scale} attribute */
    public void setScale(String value) {
        this.scale = value;
    }

    /** @return the {@code depth} attribute, or {@code null} if unset */
    public String getDepth() {
        return this.depth;
    }

    /** @param value the {@code depth} attribute */
    public void setDepth(String value) {
        this.depth = value;
    }

    /** @return the {@code contentdepth} attribute, or {@code null} if unset */
    public String getContentdepth() {
        return this.contentdepth;
    }

    /** @param value the {@code contentdepth} attribute */
    public void setContentdepth(String value) {
        this.contentdepth = value;
    }

    /** @return the {@code xml:id} attribute, or {@code null} if unset */
    public String getId() {
        return this.id;
    }

    /** @param value the {@code xml:id} attribute */
    public void setId(String value) {
        this.id = value;
    }

    /** @return the {@code version} attribute, or {@code null} if unset */
    public String getCommonVersion() {
        return this.commonVersion;
    }

    /** @param value the {@code version} attribute */
    public void setCommonVersion(String value) {
        this.commonVersion = value;
    }

    /** @return the {@code xml:lang} attribute, or {@code null} if unset */
    public String getXmlLang() {
        return this.xmlLang;
    }

    /** @param value the {@code xml:lang} attribute */
    public void setXmlLang(String value) {
        this.xmlLang = value;
    }

    /** @return the {@code xml:base} attribute, or {@code null} if unset */
    public String getBase() {
        return this.base;
    }

    /** @param value the {@code xml:base} attribute */
    public void setBase(String value) {
        this.base = value;
    }

    /** @return the {@code remap} attribute, or {@code null} if unset */
    public String getRemap() {
        return this.remap;
    }

    /** @param value the {@code remap} attribute */
    public void setRemap(String value) {
        this.remap = value;
    }

    /** @return the {@code xreflabel} attribute, or {@code null} if unset */
    public String getXreflabel() {
        return this.xreflabel;
    }

    /** @param value the {@code xreflabel} attribute */
    public void setXreflabel(String value) {
        this.xreflabel = value;
    }

    /** @return the {@code revisionflag} attribute, or {@code null} if unset */
    public String getRevisionflag() {
        return this.revisionflag;
    }

    /** @param value the {@code revisionflag} attribute */
    public void setRevisionflag(String value) {
        this.revisionflag = value;
    }

    /** @return the {@code dir} attribute, or {@code null} if unset */
    public String getDir() {
        return this.dir;
    }

    /** @param value the {@code dir} attribute */
    public void setDir(String value) {
        this.dir = value;
    }

    /** @return the {@code arch} attribute, or {@code null} if unset */
    public String getArch() {
        return this.arch;
    }

    /** @param value the {@code arch} attribute */
    public void setArch(String value) {
        this.arch = value;
    }

    /** @return the {@code audience} attribute, or {@code null} if unset */
    public String getAudience() {
        return this.audience;
    }

    /** @param value the {@code audience} attribute */
    public void setAudience(String value) {
        this.audience = value;
    }

    /** @return the {@code condition} attribute, or {@code null} if unset */
    public String getCondition() {
        return this.condition;
    }

    /** @param value the {@code condition} attribute */
    public void setCondition(String value) {
        this.condition = value;
    }

    /** @return the {@code conformance} attribute, or {@code null} if unset */
    public String getConformance() {
        return this.conformance;
    }

    /** @param value the {@code conformance} attribute */
    public void setConformance(String value) {
        this.conformance = value;
    }

    /** @return the {@code os} attribute, or {@code null} if unset */
    public String getOs() {
        return this.os;
    }

    /** @param value the {@code os} attribute */
    public void setOs(String value) {
        this.os = value;
    }

    /** @return the {@code revision} attribute, or {@code null} if unset */
    public String getCommonRevision() {
        return this.commonRevision;
    }

    /** @param value the {@code revision} attribute */
    public void setCommonRevision(String value) {
        this.commonRevision = value;
    }

    /** @return the {@code security} attribute, or {@code null} if unset */
    public String getSecurity() {
        return this.security;
    }

    /** @param value the {@code security} attribute */
    public void setSecurity(String value) {
        this.security = value;
    }

    /** @return the {@code userlevel} attribute, or {@code null} if unset */
    public String getUserlevel() {
        return this.userlevel;
    }

    /** @param value the {@code userlevel} attribute */
    public void setUserlevel(String value) {
        this.userlevel = value;
    }

    /** @return the {@code vendor} attribute, or {@code null} if unset */
    public String getVendor() {
        return this.vendor;
    }

    /** @param value the {@code vendor} attribute */
    public void setVendor(String value) {
        this.vendor = value;
    }

    /** @return the {@code wordsize} attribute, or {@code null} if unset */
    public String getWordsize() {
        return this.wordsize;
    }

    /** @param value the {@code wordsize} attribute */
    public void setWordsize(String value) {
        this.wordsize = value;
    }

    /** @return the {@code annotations} attribute, or {@code null} if unset */
    public String getAnnotations() {
        return this.annotations;
    }

    /** @param value the {@code annotations} attribute */
    public void setAnnotations(String value) {
        this.annotations = value;
    }

}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pulsar.common.util.collections;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertNotEquals;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import lombok.Cleanup;
import org.apache.pulsar.common.util.collections.GrowablePriorityLongPairQueue.LongPair;
import org.testng.annotations.Test;
import com.google.common.collect.Lists;
public class GrowablePriorityLongPairQueueTest {

    /** A capacity of zero must be rejected at construction time. */
    @Test
    public void testConstructor() {
        try {
            new GrowablePriorityLongPairQueue(0);
            fail("should have thrown exception");
        } catch (IllegalArgumentException e) {
            // ok: zero initial capacity is illegal
        }
    }

    /** Basic insert/remove bookkeeping; duplicate pairs are allowed and counted individually. */
    @Test
    public void simpleInsertions() {
        GrowablePriorityLongPairQueue queue = new GrowablePriorityLongPairQueue(16);
        assertTrue(queue.isEmpty());

        queue.add(1, 1);
        assertFalse(queue.isEmpty());

        queue.add(2, 2);
        queue.add(3, 3);
        assertEquals(queue.size(), 3);

        assertTrue(queue.remove(1, 1));
        assertEquals(queue.size(), 2);

        queue.add(1, 1);
        assertEquals(queue.size(), 3);

        // The queue is not a set: re-adding the same pair grows the size.
        queue.add(1, 1);
        assertEquals(queue.size(), 4);
    }

    /** remove(first, second) must match BOTH components of a pair. */
    @Test
    public void testRemove() {
        GrowablePriorityLongPairQueue queue = new GrowablePriorityLongPairQueue();
        assertTrue(queue.isEmpty());
        queue.add(1, 1);
        assertFalse(queue.isEmpty());

        // Second component differs -> nothing is removed.
        assertFalse(queue.remove(1, 0));
        assertFalse(queue.isEmpty());

        assertTrue(queue.remove(1, 1));
        assertTrue(queue.isEmpty());
    }

    /** Filling the queue beyond its initial capacity must grow the backing array. */
    @Test
    public void testExpandQueue() {
        int n = 16;
        GrowablePriorityLongPairQueue queue = new GrowablePriorityLongPairQueue(n / 2);
        assertEquals(queue.capacity(), n / 2);
        assertEquals(queue.size(), 0);

        for (int i = 0; i < n; i++) {
            queue.add(i, 1);
        }
        assertEquals(queue.capacity(), n);
        assertEquals(queue.size(), n);
    }

    /** After a large expansion, removing every element empties the queue but retains its capacity. */
    @Test
    public void testExpandRemoval() {
        int n = 16;
        GrowablePriorityLongPairQueue queue = new GrowablePriorityLongPairQueue(n / 2);
        assertEquals(queue.capacity(), n / 2);
        assertEquals(queue.size(), 0);

        int insertItems = 1000 * n;
        for (int i = 0; i < insertItems; i++) {
            queue.add(i, -1);
        }

        // Expected capacity: the next power of two >= insertItems.
        int newSize = (int) Math.pow(2, 32 - Integer.numberOfLeadingZeros(insertItems - 1));
        assertEquals(queue.capacity(), newSize);
        assertEquals(queue.size(), insertItems);

        Set<LongPair> pairs = new HashSet<>();
        queue.forEach((first, second) -> pairs.add(new LongPair(first, second)));
        pairs.forEach(pair -> queue.remove(pair.first, -1));

        // Removals must not shrink the backing array.
        assertEquals(queue.capacity(), newSize);
        assertEquals(queue.size(), 0);
    }

    /** Deleting every inserted element must not trigger an expansion. */
    @Test
    public void testExpandWithDeletes() {
        int n = 16;
        GrowablePriorityLongPairQueue queue = new GrowablePriorityLongPairQueue(n / 2);
        assertEquals(queue.capacity(), n / 2);
        assertEquals(queue.size(), 0);

        for (int i = 0; i < n / 2; i++) {
            queue.add(i, i);
        }
        for (int i = 0; i < n / 2; i++) {
            assertTrue(queue.remove(i, i));
        }
        assertEquals(queue.capacity(), n / 2);
        assertEquals(queue.size(), 0);

        // NOTE(review): this loop never executes (it starts at n with an exclusive bound of n).
        // The bound was presumably meant to be something like (2 * n), but the assertions below
        // only hold for an empty queue -- confirm the original intent before changing the bound.
        for (int i = n; i < (n); i++) {
            queue.add(i, i);
        }
        assertEquals(queue.capacity(), n / 2);
        assertEquals(queue.size(), 0);
    }

    /** Every add() performed concurrently from many threads must be reflected in size(). */
    @Test
    public void concurrentInsertions() throws Throwable {
        GrowablePriorityLongPairQueue queue = new GrowablePriorityLongPairQueue();
        @Cleanup("shutdownNow")
        ExecutorService executor = Executors.newCachedThreadPool();

        final int nThreads = 16;
        final int N = 100_000;

        List<Future<?>> futures = new ArrayList<>();
        for (int i = 0; i < nThreads; i++) {
            final int threadIdx = i;
            futures.add(executor.submit(() -> {
                Random random = new Random();
                for (int j = 0; j < N; j++) {
                    long key = random.nextLong();
                    // Snap the key to a multiple of (threadIdx + 1) and make it non-negative.
                    // NOTE(review): contrary to the comment this replaces, the snapping does NOT
                    // guarantee uniqueness -- but the queue accepts duplicates, so every add()
                    // still contributes to size().
                    key -= key % (threadIdx + 1);
                    key = Math.abs(key);
                    queue.add(key, key);
                }
            }));
        }

        for (Future<?> future : futures) {
            future.get();
        }
        assertEquals(queue.size(), N * nThreads);
    }

    /**
     * Same scenario as {@link #concurrentInsertions()}.
     * NOTE(review): despite its name, this test performs no concurrent reads.
     */
    @Test
    public void concurrentInsertionsAndReads() throws Throwable {
        GrowablePriorityLongPairQueue queue = new GrowablePriorityLongPairQueue();
        @Cleanup("shutdownNow")
        ExecutorService executor = Executors.newCachedThreadPool();

        final int nThreads = 16;
        final int N = 100_000;

        List<Future<?>> futures = new ArrayList<>();
        for (int i = 0; i < nThreads; i++) {
            final int threadIdx = i;
            futures.add(executor.submit(() -> {
                Random random = new Random();
                for (int j = 0; j < N; j++) {
                    long key = random.nextLong();
                    key -= key % (threadIdx + 1);
                    key = Math.abs(key);
                    queue.add(key, key);
                }
            }));
        }

        for (Future<?> future : futures) {
            future.get();
        }
        assertEquals(queue.size(), N * nThreads);
    }

    /** items() always reflects the live contents of the queue. */
    @Test
    public void testIteration() {
        GrowablePriorityLongPairQueue queue = new GrowablePriorityLongPairQueue();
        assertEquals(queue.items(), Collections.emptyList());

        queue.add(0L, 0L);
        assertEquals(queue.items().iterator().next(), new LongPair(0L, 0L));

        queue.remove(0L, 0L);
        assertEquals(queue.items(), Collections.emptyList());

        queue.add(0L, 0L);
        queue.add(1L, 1L);
        queue.add(2L, 2L);

        // Sort by natural ordering for a deterministic comparison.
        List<LongPair> values = new ArrayList<>(queue.items());
        values.sort(null);
        assertEquals(values, Lists.newArrayList(new LongPair(0, 0), new LongPair(1, 1), new LongPair(2, 2)));

        queue.clear();
        assertTrue(queue.isEmpty());
    }

    /** Elements collected via forEach() can subsequently be removed one by one. */
    @Test
    public void testRemoval() {
        GrowablePriorityLongPairQueue queue = new GrowablePriorityLongPairQueue();

        queue.add(0, 0);
        queue.add(1, 1);
        queue.add(3, 3);
        queue.add(6, 6);
        queue.add(7, 7);

        List<LongPair> values = new ArrayList<>(queue.items());
        values.sort(null);
        assertEquals(values, Lists.newArrayList(new LongPair(0, 0), new LongPair(1, 1), new LongPair(3, 3),
                new LongPair(6, 6), new LongPair(7, 7)));

        List<LongPair> removeList = new ArrayList<>();
        queue.forEach((first, second) -> {
            if (first < 5) {
                removeList.add(new LongPair(first, second));
            }
        });
        removeList.forEach((pair) -> queue.remove(pair.first, pair.second));

        assertEquals(queue.size(), values.size() - 3);
        values = new ArrayList<>(queue.items());
        values.sort(null);
        assertEquals(values, Lists.newArrayList(new LongPair(6, 6), new LongPair(7, 7)));
    }

    /** removeIf() deletes exactly the matching elements and reports how many were removed. */
    @Test
    public void testIfRemoval() {
        GrowablePriorityLongPairQueue queue = new GrowablePriorityLongPairQueue();

        queue.add(0, 0);
        queue.add(1, 1);
        queue.add(3, 3);
        queue.add(6, 6);
        queue.add(7, 7);

        List<LongPair> values = new ArrayList<>(queue.items());
        values.sort(null);
        assertEquals(values, Lists.newArrayList(new LongPair(0, 0), new LongPair(1, 1), new LongPair(3, 3),
                new LongPair(6, 6), new LongPair(7, 7)));

        int removeItems = queue.removeIf((first, second) -> first < 5);

        assertEquals(removeItems, 3);
        assertEquals(queue.size(), values.size() - 3);

        values = new ArrayList<>(queue.items());
        values.sort(null);
        assertEquals(values, Lists.newArrayList(new LongPair(6, 6), new LongPair(7, 7)));
    }

    /** items(limit) returns at most the requested number of elements. */
    @Test
    public void testItems() {
        GrowablePriorityLongPairQueue queue = new GrowablePriorityLongPairQueue();

        int n = 100;
        int limit = 10;
        for (int i = 0; i < n; i++) {
            queue.add(i, i);
        }

        Set<LongPair> items = queue.items();
        Set<LongPair> limitItems = queue.items(limit);
        assertEquals(items.size(), n);
        assertEquals(limitItems.size(), limit);

        int totalRemovedItems = queue.removeIf((first, second) -> limitItems.contains((new LongPair(first, second))));
        assertEquals(limitItems.size(), totalRemovedItems);
        assertEquals(queue.size(), n - limit);
    }

    /** Removal matches on value equality, not reference identity. */
    @Test
    public void testEqualsObjects() {
        GrowablePriorityLongPairQueue queue = new GrowablePriorityLongPairQueue();

        long t1 = 1;
        long t2 = 2;
        long t1_b = 1;
        assertEquals(t1, t1_b);
        assertNotEquals(t2, t1);
        assertNotEquals(t2, t1_b);

        queue.add(t1, t1);
        assertTrue(queue.remove(t1_b, t1_b));
    }

    /** remove() must drain elements in priority order: by first component, then by second. */
    @Test
    public void testInsertAndRemove() throws Exception {
        GrowablePriorityLongPairQueue queue = new GrowablePriorityLongPairQueue(8);
        queue.add(10, 10);
        queue.add(10, 4);
        queue.add(10, 5);
        queue.add(8, 10);
        queue.add(3, 15);
        queue.add(23, 15);
        queue.add(1, 155);
        queue.add(1, 155);
        queue.add(3, 15);
        queue.add(33, 1);

        assertEquals(queue.remove(), new LongPair(1, 155));
        assertEquals(queue.remove(), new LongPair(1, 155));
        assertEquals(queue.remove(), new LongPair(3, 15));
        assertEquals(queue.remove(), new LongPair(3, 15));
        assertEquals(queue.remove(), new LongPair(8, 10));
        assertEquals(queue.remove(), new LongPair(10, 4));
        assertEquals(queue.remove(), new LongPair(10, 5));
        assertEquals(queue.remove(), new LongPair(10, 10));
        assertEquals(queue.remove(), new LongPair(23, 15));
        assertEquals(queue.remove(), new LongPair(33, 1));
    }

    /**
     * exists() must match both components of a pair.
     * NOTE(review): despite the method name, no duplicate pairs are actually inserted here.
     */
    @Test
    public void testSetWithDuplicateInsert() {
        GrowablePriorityLongPairQueue queue = new GrowablePriorityLongPairQueue(1);
        assertTrue(queue.isEmpty());

        queue.add(20, 20);
        queue.add(12, 12);
        queue.add(14, 14);
        queue.add(6, 6);
        queue.add(1, 1);
        queue.add(7, 7);
        queue.add(2, 2);
        queue.add(3, 3);

        assertTrue(queue.exists(7, 7));
        assertFalse(queue.exists(7, 1));
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.shiro.realm;
import org.apache.shiro.authc.credential.CredentialsMatcher;
import org.apache.shiro.authz.*;
import org.apache.shiro.authz.permission.*;
import org.apache.shiro.cache.Cache;
import org.apache.shiro.cache.CacheManager;
import org.apache.shiro.subject.PrincipalCollection;
import org.apache.shiro.util.CollectionUtils;
import org.apache.shiro.lang.util.Initializable;
import org.apache.shiro.lang.util.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;
/**
* An {@code AuthorizingRealm} extends the {@code AuthenticatingRealm}'s capabilities by adding Authorization
* (access control) support.
* <p/>
* This implementation will perform all role and permission checks automatically (and subclasses do not have to
* write this logic) as long as the
* {@link #getAuthorizationInfo(org.apache.shiro.subject.PrincipalCollection)} method returns an
* {@link AuthorizationInfo}. Please see that method's JavaDoc for an in-depth explanation.
* <p/>
* If you find that you do not want to utilize the {@link AuthorizationInfo AuthorizationInfo} construct,
* you are of course free to subclass the {@link AuthenticatingRealm AuthenticatingRealm} directly instead and
* implement the remaining Realm interface methods directly. You might do this if you want have better control
* over how the Role and Permission checks occur for your specific data source. However, using AuthorizationInfo
* (and its default implementation {@link org.apache.shiro.authz.SimpleAuthorizationInfo SimpleAuthorizationInfo}) is sufficient in the large
* majority of Realm cases.
*
* @see org.apache.shiro.authz.SimpleAuthorizationInfo
* @since 0.2
*/
public abstract class AuthorizingRealm extends AuthenticatingRealm
        implements Authorizer, Initializable, PermissionResolverAware, RolePermissionResolverAware {

    //TODO - complete JavaDoc

    /*-------------------------------------------
    |             C O N S T A N T S             |
    ============================================*/

    private static final Logger log = LoggerFactory.getLogger(AuthorizingRealm.class);

    /**
     * The default suffix appended to the realm name for caching AuthorizationInfo instances.
     */
    private static final String DEFAULT_AUTHORIZATION_CACHE_SUFFIX = ".authorizationCache";

    /**
     * Counts constructed instances so that each instance of the same realm class receives a unique
     * default authorization cache name (see the constructor).
     */
    private static final AtomicInteger INSTANCE_COUNT = new AtomicInteger();

    /*-------------------------------------------
    |    I N S T A N C E   V A R I A B L E S    |
    ============================================*/

    /**
     * Whether authorization caching should be used when a cache or CacheManager is available.
     * Defaults to {@code true} (set in the constructor).
     */
    private boolean authorizationCachingEnabled;

    /**
     * The cache used by this realm to store AuthorizationInfo instances associated with individual Subject principals.
     */
    private Cache<Object, AuthorizationInfo> authorizationCache;

    /**
     * The name under which the authorization cache is obtained from the {@link CacheManager}.
     */
    private String authorizationCacheName;

    /**
     * Translates String permissions (e.g. wildcard strings) into {@link Permission} instances.
     */
    private PermissionResolver permissionResolver;

    /**
     * Optionally translates role names into the permissions implied by those roles.  May be {@code null}.
     */
    private RolePermissionResolver permissionRoleResolver;

    /*-------------------------------------------
    |         C O N S T R U C T O R S           |
    ============================================*/

    public AuthorizingRealm() {
        this(null, null);
    }

    public AuthorizingRealm(CacheManager cacheManager) {
        this(cacheManager, null);
    }

    public AuthorizingRealm(CredentialsMatcher matcher) {
        this(null, matcher);
    }

    public AuthorizingRealm(CacheManager cacheManager, CredentialsMatcher matcher) {
        super();
        // NOTE(review): these setters are overridable and are invoked from the constructor;
        // a subclass overriding them must not rely on its own fields being initialized yet.
        if (cacheManager != null) {
            setCacheManager(cacheManager);
        }
        if (matcher != null) {
            setCredentialsMatcher(matcher);
        }

        this.authorizationCachingEnabled = true;
        this.permissionResolver = new WildcardPermissionResolver();

        // Append an instance number so that multiple instances of the same realm class do not
        // collide on the same default cache name:
        int instanceNumber = INSTANCE_COUNT.getAndIncrement();
        this.authorizationCacheName = getClass().getName() + DEFAULT_AUTHORIZATION_CACHE_SUFFIX;
        if (instanceNumber > 0) {
            this.authorizationCacheName = this.authorizationCacheName + "." + instanceNumber;
        }
    }

    /*-------------------------------------------
    |  A C C E S S O R S / M O D I F I E R S    |
    ============================================*/

    @Override
    public void setName(String name) {
        super.setName(name);
        String authzCacheName = this.authorizationCacheName;
        if (authzCacheName != null && authzCacheName.startsWith(getClass().getName())) {
            //get rid of the default class-name based cache name.  Create a more meaningful one
            //based on the application-unique Realm name:
            this.authorizationCacheName = name + DEFAULT_AUTHORIZATION_CACHE_SUFFIX;
        }
    }

    public void setAuthorizationCache(Cache<Object, AuthorizationInfo> authorizationCache) {
        this.authorizationCache = authorizationCache;
    }

    public Cache<Object, AuthorizationInfo> getAuthorizationCache() {
        return this.authorizationCache;
    }

    public String getAuthorizationCacheName() {
        return authorizationCacheName;
    }

    @SuppressWarnings({"UnusedDeclaration"})
    public void setAuthorizationCacheName(String authorizationCacheName) {
        this.authorizationCacheName = authorizationCacheName;
    }

    /**
     * Returns {@code true} if authorization caching should be utilized if a {@link CacheManager} has been
     * {@link #setCacheManager(org.apache.shiro.cache.CacheManager) configured}, {@code false} otherwise.
     * <p/>
     * The default value is {@code true}.
     *
     * @return {@code true} if authorization caching should be utilized, {@code false} otherwise.
     */
    public boolean isAuthorizationCachingEnabled() {
        return isCachingEnabled() && authorizationCachingEnabled;
    }

    /**
     * Sets whether or not authorization caching should be utilized if a {@link CacheManager} has been
     * {@link #setCacheManager(org.apache.shiro.cache.CacheManager) configured}, {@code false} otherwise.
     * <p/>
     * The default value is {@code true}.
     *
     * @param authenticationCachingEnabled the value to set
     *                                     (NOTE(review): the parameter name says "authentication" but it controls
     *                                     <em>authorization</em> caching; kept for signature compatibility)
     */
    @SuppressWarnings({"UnusedDeclaration"})
    public void setAuthorizationCachingEnabled(boolean authenticationCachingEnabled) {
        this.authorizationCachingEnabled = authenticationCachingEnabled;
        if (authenticationCachingEnabled) {
            // Enabling authorization caching implies enabling caching in general.
            setCachingEnabled(true);
        }
    }

    public PermissionResolver getPermissionResolver() {
        return permissionResolver;
    }

    public void setPermissionResolver(PermissionResolver permissionResolver) {
        if (permissionResolver == null) {
            throw new IllegalArgumentException("Null PermissionResolver is not allowed");
        }
        this.permissionResolver = permissionResolver;
    }

    public RolePermissionResolver getRolePermissionResolver() {
        return permissionRoleResolver;
    }

    public void setRolePermissionResolver(RolePermissionResolver permissionRoleResolver) {
        this.permissionRoleResolver = permissionRoleResolver;
    }

    /*--------------------------------------------
    |               M E T H O D S               |
    ============================================*/

    /**
     * Initializes this realm and potentially enables a cache, depending on configuration.
     * <p/>
     * When this method is called, the following logic is executed:
     * <ol>
     * <li>If the {@link #setAuthorizationCache cache} property has been set, it will be
     * used to cache the AuthorizationInfo objects returned from {@link #getAuthorizationInfo}
     * method invocations.
     * All future calls to {@code getAuthorizationInfo} will attempt to use this cache first
     * to alleviate any potentially unnecessary calls to an underlying data store.</li>
     * <li>If the {@link #setAuthorizationCache cache} property has <b>not</b> been set,
     * the {@link #setCacheManager cacheManager} property will be checked.
     * If a {@code cacheManager} has been set, it will be used to create an authorization
     * {@code cache}, and this newly created cache will be used as specified in #1.</li>
     * <li>If neither the {@link #setAuthorizationCache(org.apache.shiro.cache.Cache) cache}
     * or {@link #setCacheManager(org.apache.shiro.cache.CacheManager) cacheManager}
     * properties are set, caching will be disabled and authorization look-ups will be delegated to
     * subclass implementations for each authorization check.</li>
     * </ol>
     */
    @Override
    protected void onInit() {
        super.onInit();
        //trigger obtaining the authorization cache if possible
        getAvailableAuthorizationCache();
    }

    @Override
    protected void afterCacheManagerSet() {
        super.afterCacheManagerSet();
        //trigger obtaining the authorization cache if possible
        getAvailableAuthorizationCache();
    }

    /**
     * Lazily builds the authorization cache from the configured {@link CacheManager}, if one exists.
     * Returns {@code null} when neither a cache nor a cacheManager is available.
     */
    private Cache<Object, AuthorizationInfo> getAuthorizationCacheLazy() {
        if (this.authorizationCache == null) {
            log.debug("No authorizationCache instance set.  Checking for a cacheManager...");
            CacheManager cacheManager = getCacheManager();
            if (cacheManager != null) {
                String cacheName = getAuthorizationCacheName();
                log.debug("CacheManager [{}] has been configured.  Building authorization cache named [{}]",
                        cacheManager, cacheName);
                this.authorizationCache = cacheManager.getCache(cacheName);
            } else {
                log.debug("No cache or cacheManager properties have been set.  Authorization cache cannot "
                        + "be obtained.");
            }
        }
        return this.authorizationCache;
    }

    /**
     * Returns the authorization cache to consult, lazily creating it if caching is enabled,
     * or {@code null} if caching is disabled or no cache can be obtained.
     */
    private Cache<Object, AuthorizationInfo> getAvailableAuthorizationCache() {
        Cache<Object, AuthorizationInfo> cache = getAuthorizationCache();
        if (cache == null && isAuthorizationCachingEnabled()) {
            cache = getAuthorizationCacheLazy();
        }
        return cache;
    }

    /**
     * Returns an account's authorization-specific information for the specified {@code principals},
     * or {@code null} if no account could be found.  The resulting {@code AuthorizationInfo} object is used
     * by the other method implementations in this class to automatically perform access control checks for the
     * corresponding {@code Subject}.
     * <p/>
     * This implementation obtains the actual {@code AuthorizationInfo} object from the subclass's
     * implementation of
     * {@link #doGetAuthorizationInfo(org.apache.shiro.subject.PrincipalCollection) doGetAuthorizationInfo}, and then
     * caches it for efficient reuse if caching is enabled (see below).
     * <p/>
     * Invocations of this method should be thought of as completely orthogonal to acquiring
     * {@link #getAuthenticationInfo(org.apache.shiro.authc.AuthenticationToken) authenticationInfo}, since either could
     * occur in any order.
     * <p/>
     * For example, in &quot;Remember Me&quot; scenarios, the user identity is remembered (and
     * assumed) for their current session and an authentication attempt during that session might never occur.
     * But because their identity would be remembered, that is sufficient enough information to call this method to
     * execute any necessary authorization checks.  For this reason, authentication and authorization should be
     * loosely coupled and not depend on each other.
     * <h3>Caching</h3>
     * The {@code AuthorizationInfo} values returned from this method are cached for efficient reuse
     * if caching is enabled.  Caching is enabled automatically when an {@link #setAuthorizationCache authorizationCache}
     * instance has been explicitly configured, or if a {@link #setCacheManager cacheManager} has been configured, which
     * will be used to lazily create the {@code authorizationCache} as needed.
     * <p/>
     * If caching is enabled, the authorization cache will be checked first and if found, will return the cached
     * {@code AuthorizationInfo} immediately.  If caching is disabled, or there is a cache miss, the authorization
     * info will be looked up from the underlying data store via the
     * {@link #doGetAuthorizationInfo(org.apache.shiro.subject.PrincipalCollection)} method, which must be implemented
     * by subclasses.
     * <h4>Changed Data</h4>
     * If caching is enabled and if any authorization data for an account is changed at
     * runtime, such as adding or removing roles and/or permissions, the subclass implementation should clear the
     * cached AuthorizationInfo for that account via the
     * {@link #clearCachedAuthorizationInfo(org.apache.shiro.subject.PrincipalCollection) clearCachedAuthorizationInfo}
     * method.  This ensures that the next call to {@code getAuthorizationInfo(PrincipalCollection)} will
     * acquire the account's fresh authorization data, where it will then be cached for efficient reuse.  This
     * ensures that stale authorization data will not be reused.
     *
     * @param principals the corresponding Subject's identifying principals with which to look up the Subject's
     *                   {@code AuthorizationInfo}.
     * @return the authorization information for the account associated with the specified {@code principals},
     *         or {@code null} if no account could be found.
     */
    protected AuthorizationInfo getAuthorizationInfo(PrincipalCollection principals) {
        if (principals == null) {
            return null;
        }

        AuthorizationInfo info = null;

        log.trace("Retrieving AuthorizationInfo for principals [{}]", principals);

        Cache<Object, AuthorizationInfo> cache = getAvailableAuthorizationCache();
        if (cache != null) {
            log.trace("Attempting to retrieve the AuthorizationInfo from cache.");
            Object key = getAuthorizationCacheKey(principals);
            info = cache.get(key);
            if (info == null) {
                log.trace("No AuthorizationInfo found in cache for principals [{}]", principals);
            } else {
                log.trace("AuthorizationInfo found in cache for principals [{}]", principals);
            }
        }

        if (info == null) {
            // Call template method if the info was not found in a cache
            info = doGetAuthorizationInfo(principals);
            // If the info is not null and the cache has been created, then cache the authorization info.
            if (info != null && cache != null) {
                log.trace("Caching authorization info for principals: [{}].", principals);
                Object key = getAuthorizationCacheKey(principals);
                cache.put(key, info);
            }
        }

        return info;
    }

    protected Object getAuthorizationCacheKey(PrincipalCollection principals) {
        return principals;
    }

    /**
     * Clears out the AuthorizationInfo cache entry for the specified account.
     * <p/>
     * This method is provided as a convenience to subclasses so they can invalidate a cache entry when they
     * change an account's authorization data (add/remove roles or permissions) during runtime.  Because an account's
     * AuthorizationInfo can be cached, there needs to be a way to invalidate the cache for only that account so that
     * subsequent authorization operations don't use the (old) cached value if account data changes.
     * <p/>
     * After this method is called, the next authorization check for that same account will result in a call to
     * {@link #getAuthorizationInfo(org.apache.shiro.subject.PrincipalCollection) getAuthorizationInfo}, and the
     * resulting return value will be cached before being returned so it can be reused for later authorization checks.
     * <p/>
     * If you wish to clear out all associated cached data (and not just authorization data), use the
     * {@link #clearCache(org.apache.shiro.subject.PrincipalCollection)} method instead (which will in turn call this
     * method by default).
     *
     * @param principals the principals of the account for which to clear the cached AuthorizationInfo.
     */
    protected void clearCachedAuthorizationInfo(PrincipalCollection principals) {
        if (principals == null) {
            return;
        }

        Cache<Object, AuthorizationInfo> cache = getAvailableAuthorizationCache();
        //cache instance will be non-null if caching is enabled:
        if (cache != null) {
            Object key = getAuthorizationCacheKey(principals);
            cache.remove(key);
        }
    }

    /**
     * Retrieves the AuthorizationInfo for the given principals from the underlying data store.  When returning
     * an instance from this method, you might want to consider using an instance of
     * {@link org.apache.shiro.authz.SimpleAuthorizationInfo SimpleAuthorizationInfo}, as it is suitable in most cases.
     *
     * @param principals the primary identifying principals of the AuthorizationInfo that should be retrieved.
     * @return the AuthorizationInfo associated with this principals.
     * @see org.apache.shiro.authz.SimpleAuthorizationInfo
     */
    protected abstract AuthorizationInfo doGetAuthorizationInfo(PrincipalCollection principals);

    /**
     * Aggregates all permissions from the given info: direct object permissions, resolved string
     * permissions, and permissions implied by the info's roles.  Returns an unmodifiable (possibly
     * empty) set; never {@code null}.
     */
    //visibility changed from private to protected per SHIRO-332
    protected Collection<Permission> getPermissions(AuthorizationInfo info) {
        Set<Permission> permissions = new HashSet<Permission>();

        if (info != null) {
            Collection<Permission> perms = info.getObjectPermissions();
            if (!CollectionUtils.isEmpty(perms)) {
                permissions.addAll(perms);
            }
            perms = resolvePermissions(info.getStringPermissions());
            if (!CollectionUtils.isEmpty(perms)) {
                permissions.addAll(perms);
            }
            perms = resolveRolePermissions(info.getRoles());
            if (!CollectionUtils.isEmpty(perms)) {
                permissions.addAll(perms);
            }
        }

        if (permissions.isEmpty()) {
            return Collections.emptySet();
        } else {
            return Collections.unmodifiableSet(permissions);
        }
    }

    /**
     * Resolves String permissions to {@link Permission} instances via the configured
     * {@link PermissionResolver}.  Blank/null strings are skipped.
     */
    private Collection<Permission> resolvePermissions(Collection<String> stringPerms) {
        Collection<Permission> perms = Collections.emptySet();
        PermissionResolver resolver = getPermissionResolver();
        if (resolver != null && !CollectionUtils.isEmpty(stringPerms)) {
            perms = new LinkedHashSet<Permission>(stringPerms.size());
            for (String strPermission : stringPerms) {
                if (StringUtils.clean(strPermission) != null) {
                    Permission permission = resolver.resolvePermission(strPermission);
                    perms.add(permission);
                }
            }
        }
        return perms;
    }

    /**
     * Resolves role names to the permissions they imply via the configured
     * {@link RolePermissionResolver}, if one is set.
     */
    private Collection<Permission> resolveRolePermissions(Collection<String> roleNames) {
        Collection<Permission> perms = Collections.emptySet();
        RolePermissionResolver resolver = getRolePermissionResolver();
        if (resolver != null && !CollectionUtils.isEmpty(roleNames)) {
            perms = new LinkedHashSet<Permission>(roleNames.size());
            for (String roleName : roleNames) {
                Collection<Permission> resolved = resolver.resolvePermissionsInRole(roleName);
                if (!CollectionUtils.isEmpty(resolved)) {
                    perms.addAll(resolved);
                }
            }
        }
        return perms;
    }

    public boolean isPermitted(PrincipalCollection principals, String permission) {
        Permission p = getPermissionResolver().resolvePermission(permission);
        return isPermitted(principals, p);
    }

    public boolean isPermitted(PrincipalCollection principals, Permission permission) {
        AuthorizationInfo info = getAuthorizationInfo(principals);
        return isPermitted(permission, info);
    }

    /**
     * Returns {@code true} if any of the account's permissions {@link Permission#implies implies}
     * the given permission.
     */
    //visibility changed from private to protected per SHIRO-332
    protected boolean isPermitted(Permission permission, AuthorizationInfo info) {
        Collection<Permission> perms = getPermissions(info);
        if (perms != null && !perms.isEmpty()) {
            for (Permission perm : perms) {
                if (perm.implies(permission)) {
                    return true;
                }
            }
        }
        return false;
    }

    public boolean[] isPermitted(PrincipalCollection subjectIdentifier, String... permissions) {
        List<Permission> perms = new ArrayList<Permission>(permissions.length);
        for (String permString : permissions) {
            perms.add(getPermissionResolver().resolvePermission(permString));
        }
        return isPermitted(subjectIdentifier, perms);
    }

    public boolean[] isPermitted(PrincipalCollection principals, List<Permission> permissions) {
        AuthorizationInfo info = getAuthorizationInfo(principals);
        return isPermitted(permissions, info);
    }

    /**
     * Returns one boolean per requested permission, in iteration order; an empty array for a
     * null/empty permission list.
     */
    protected boolean[] isPermitted(List<Permission> permissions, AuthorizationInfo info) {
        boolean[] result;
        if (permissions != null && !permissions.isEmpty()) {
            int size = permissions.size();
            result = new boolean[size];
            int i = 0;
            for (Permission p : permissions) {
                result[i++] = isPermitted(p, info);
            }
        } else {
            result = new boolean[0];
        }
        return result;
    }

    public boolean isPermittedAll(PrincipalCollection subjectIdentifier, String... permissions) {
        if (permissions != null && permissions.length > 0) {
            Collection<Permission> perms = new ArrayList<Permission>(permissions.length);
            for (String permString : permissions) {
                perms.add(getPermissionResolver().resolvePermission(permString));
            }
            return isPermittedAll(subjectIdentifier, perms);
        }
        // No permissions supplied -> nothing can be "all permitted".
        return false;
    }

    public boolean isPermittedAll(PrincipalCollection principal, Collection<Permission> permissions) {
        AuthorizationInfo info = getAuthorizationInfo(principal);
        return info != null && isPermittedAll(permissions, info);
    }

    /**
     * Returns {@code true} if every permission in the collection is permitted.  A null/empty
     * collection is vacuously permitted.
     */
    protected boolean isPermittedAll(Collection<Permission> permissions, AuthorizationInfo info) {
        if (permissions != null && !permissions.isEmpty()) {
            for (Permission p : permissions) {
                if (!isPermitted(p, info)) {
                    return false;
                }
            }
        }
        return true;
    }

    public void checkPermission(PrincipalCollection subjectIdentifier, String permission) throws AuthorizationException {
        Permission p = getPermissionResolver().resolvePermission(permission);
        checkPermission(subjectIdentifier, p);
    }

    public void checkPermission(PrincipalCollection principal, Permission permission) throws AuthorizationException {
        AuthorizationInfo info = getAuthorizationInfo(principal);
        checkPermission(permission, info);
    }

    /**
     * Throws {@link UnauthorizedException} if the account is not permitted the given permission.
     */
    protected void checkPermission(Permission permission, AuthorizationInfo info) {
        if (!isPermitted(permission, info)) {
            String msg = "User is not permitted [" + permission + "]";
            throw new UnauthorizedException(msg);
        }
    }

    public void checkPermissions(PrincipalCollection subjectIdentifier, String... permissions) throws AuthorizationException {
        if (permissions != null) {
            for (String permString : permissions) {
                checkPermission(subjectIdentifier, permString);
            }
        }
    }

    public void checkPermissions(PrincipalCollection principal, Collection<Permission> permissions) throws AuthorizationException {
        AuthorizationInfo info = getAuthorizationInfo(principal);
        checkPermissions(permissions, info);
    }

    /**
     * Throws {@link UnauthorizedException} on the first permission in the collection that the
     * account does not hold.
     */
    protected void checkPermissions(Collection<Permission> permissions, AuthorizationInfo info) {
        if (permissions != null && !permissions.isEmpty()) {
            for (Permission p : permissions) {
                checkPermission(p, info);
            }
        }
    }

    public boolean hasRole(PrincipalCollection principal, String roleIdentifier) {
        AuthorizationInfo info = getAuthorizationInfo(principal);
        return hasRole(roleIdentifier, info);
    }

    protected boolean hasRole(String roleIdentifier, AuthorizationInfo info) {
        return info != null && info.getRoles() != null && info.getRoles().contains(roleIdentifier);
    }

    public boolean[] hasRoles(PrincipalCollection principal, List<String> roleIdentifiers) {
        AuthorizationInfo info = getAuthorizationInfo(principal);
        // When no account info exists, every requested role check is false (default boolean[]).
        boolean[] result = new boolean[roleIdentifiers != null ? roleIdentifiers.size() : 0];
        if (info != null) {
            result = hasRoles(roleIdentifiers, info);
        }
        return result;
    }

    /**
     * Returns one boolean per requested role identifier, in iteration order; an empty array for a
     * null/empty list.
     */
    protected boolean[] hasRoles(List<String> roleIdentifiers, AuthorizationInfo info) {
        boolean[] result;
        if (roleIdentifiers != null && !roleIdentifiers.isEmpty()) {
            int size = roleIdentifiers.size();
            result = new boolean[size];
            int i = 0;
            for (String roleName : roleIdentifiers) {
                result[i++] = hasRole(roleName, info);
            }
        } else {
            result = new boolean[0];
        }
        return result;
    }

    public boolean hasAllRoles(PrincipalCollection principal, Collection<String> roleIdentifiers) {
        AuthorizationInfo info = getAuthorizationInfo(principal);
        return info != null && hasAllRoles(roleIdentifiers, info);
    }

    /**
     * Returns {@code true} if the account has every role in the collection.  A null/empty
     * collection is vacuously satisfied.
     */
    private boolean hasAllRoles(Collection<String> roleIdentifiers, AuthorizationInfo info) {
        if (roleIdentifiers != null && !roleIdentifiers.isEmpty()) {
            for (String roleName : roleIdentifiers) {
                if (!hasRole(roleName, info)) {
                    return false;
                }
            }
        }
        return true;
    }

    public void checkRole(PrincipalCollection principal, String role) throws AuthorizationException {
        AuthorizationInfo info = getAuthorizationInfo(principal);
        checkRole(role, info);
    }

    /**
     * Throws {@link UnauthorizedException} if the account does not have the given role.
     */
    protected void checkRole(String role, AuthorizationInfo info) {
        if (!hasRole(role, info)) {
            String msg = "User does not have role [" + role + "]";
            throw new UnauthorizedException(msg);
        }
    }

    public void checkRoles(PrincipalCollection principal, Collection<String> roles) throws AuthorizationException {
        AuthorizationInfo info = getAuthorizationInfo(principal);
        checkRoles(roles, info);
    }

    public void checkRoles(PrincipalCollection principal, String... roles) throws AuthorizationException {
        checkRoles(principal, Arrays.asList(roles));
    }

    /**
     * Throws {@link UnauthorizedException} on the first role in the collection that the account
     * does not have.
     */
    protected void checkRoles(Collection<String> roles, AuthorizationInfo info) {
        if (roles != null && !roles.isEmpty()) {
            for (String roleName : roles) {
                checkRole(roleName, info);
            }
        }
    }

    /**
     * Calls {@code super.doClearCache} to ensure any cached authentication data is removed and then calls
     * {@link #clearCachedAuthorizationInfo(org.apache.shiro.subject.PrincipalCollection)} to remove any cached
     * authorization data.
     * <p/>
     * If overriding in a subclass, be sure to call {@code super.doClearCache} to ensure this behavior is maintained.
     *
     * @param principals the principals of the account for which to clear any cached AuthorizationInfo
     * @since 1.2
     */
    @Override
    protected void doClearCache(PrincipalCollection principals) {
        super.doClearCache(principals);
        clearCachedAuthorizationInfo(principals);
    }
}
| |
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.santisan.moviedb;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.Charset;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

import android.annotation.TargetApi;
import android.app.ActivityManager;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Bitmap.CompressFormat;
import android.graphics.BitmapFactory;
import android.os.Environment;
import android.os.StatFs;
import android.support.v4.util.LruCache;
import android.util.Log;
/**
* This class holds our bitmap caches (memory and disk).
*/
public class ImageCache {
    private static final String TAG = "ImageCache";

    // Default memory cache size
    private static final int DEFAULT_MEM_CACHE_SIZE = 1024 * 1024 * 5; // 5MB
    // Default disk cache size
    private static final int DEFAULT_DISK_CACHE_SIZE = 1024 * 1024 * 10; // 10MB

    // Compression settings when writing images to disk cache
    private static final CompressFormat DEFAULT_COMPRESS_FORMAT = CompressFormat.JPEG;
    private static final int DEFAULT_COMPRESS_QUALITY = 70;
    // Index of the single value stored per DiskLruCache entry.
    private static final int DISK_CACHE_INDEX = 0;

    // Constants to easily toggle various caches
    private static final boolean DEFAULT_MEM_CACHE_ENABLED = true;
    private static final boolean DEFAULT_DISK_CACHE_ENABLED = true;
    private static final boolean DEFAULT_CLEAR_DISK_CACHE_ON_START = false;
    private static final boolean DEFAULT_INIT_DISK_CACHE_ON_CREATE = false;

    private DiskLruCache mDiskLruCache;
    private LruCache<String, Bitmap> mMemoryCache;
    private ImageCacheParams mCacheParams;
    // Guards mDiskLruCache and mDiskCacheStarting; disk-cache readers wait on this
    // lock until initDiskCache() signals that initialization has finished.
    private final Object mDiskCacheLock = new Object();
    private boolean mDiskCacheStarting = true;

    /**
     * Creating a new ImageCache object using the specified parameters.
     *
     * @param cacheParams The cache parameters to use to initialize the cache
     */
    public ImageCache(ImageCacheParams cacheParams) {
        init(cacheParams);
    }

    /**
     * Creating a new ImageCache object using the default parameters.
     *
     * @param context The context to use
     * @param uniqueName A unique name that will be appended to the cache directory
     */
    public ImageCache(Context context, String uniqueName) {
        init(new ImageCacheParams(context, uniqueName));
    }

    /**
     * Initialize the cache, providing all parameters.
     *
     * @param cacheParams The cache parameters to initialize the cache
     */
    private void init(ImageCacheParams cacheParams) {
        mCacheParams = cacheParams;
        // Set up memory cache
        if (mCacheParams.memoryCacheEnabled) {
            if (BuildConfig.DEBUG) {
                Log.d(TAG, "Memory cache created (size = " + mCacheParams.memCacheSize + ")");
            }
            mMemoryCache = new LruCache<String, Bitmap>(mCacheParams.memCacheSize) {
                /**
                 * Measure item size in bytes rather than units which is more practical
                 * for a bitmap cache
                 */
                @Override
                protected int sizeOf(String key, Bitmap bitmap) {
                    return getBitmapSize(bitmap);
                }
            };
        }
        // By default the disk cache is not initialized here as it should be initialized
        // on a separate thread due to disk access.
        if (cacheParams.initDiskCacheOnCreate) {
            // Set up disk cache
            initDiskCache();
        }
    }

    /**
     * Initializes the disk cache. Note that this includes disk access so this should not be
     * executed on the main/UI thread. By default an ImageCache does not initialize the disk
     * cache when it is created, instead you should call initDiskCache() to initialize it on a
     * background thread.
     */
    public void initDiskCache() {
        // Set up disk cache
        synchronized (mDiskCacheLock) {
            if (mDiskLruCache == null || mDiskLruCache.isClosed()) {
                File diskCacheDir = mCacheParams.diskCacheDir;
                if (mCacheParams.diskCacheEnabled && diskCacheDir != null) {
                    if (!diskCacheDir.exists()) {
                        diskCacheDir.mkdirs();
                    }
                    //TODO: no deberia tratar de crear una cache mas chica si no hay espacio suficiente?
                    // (review note: could fall back to a smaller cache when space is tight)
                    if (getUsableSpace(diskCacheDir) > mCacheParams.diskCacheSize) {
                        try {
                            mDiskLruCache = DiskLruCache.open(diskCacheDir, 1, 1, mCacheParams.diskCacheSize);
                            if (BuildConfig.DEBUG) {
                                Log.d(TAG, "Disk cache initialized");
                            }
                        } catch (final IOException e) {
                            // Disable the disk cache for this session rather than failing.
                            mCacheParams.diskCacheDir = null;
                            Log.e(TAG, "initDiskCache - " + e);
                        }
                    }
                    else {
                        Log.w(TAG, "Disk cache not initialized: usable space is " + getUsableSpace(diskCacheDir) +
                                " and requested size is " + mCacheParams.diskCacheSize);
                    }
                }
            }
            // Wake any threads blocked in getBitmapFromDiskCache() waiting for init.
            mDiskCacheStarting = false;
            mDiskCacheLock.notifyAll();
        }
    }

    /**
     * Adds a bitmap to both memory and disk cache.
     * @param data Unique identifier for the bitmap to store
     * @param bitmap The bitmap to store
     */
    public void addBitmapToCache(String data, Bitmap bitmap) {
        if (data == null || bitmap == null)
            return;
        addBitmapToMemCache(data, bitmap);
        addBitmapToDiskCache(data, bitmap);
    }

    /**
     * Adds a bitmap to memory cache.
     * @param data Unique identifier for the bitmap to store
     * @param bitmap The bitmap to store
     */
    public void addBitmapToMemCache(String data, Bitmap bitmap) {
        if (data == null || bitmap == null)
            return;
        // Only insert if not already present; LruCache handles eviction.
        if (mMemoryCache != null && mMemoryCache.get(data) == null)
            mMemoryCache.put(data, bitmap);
    }

    /**
     * Adds a bitmap to disk cache. Involves disk I/O, so avoid the main/UI thread.
     * @param data Unique identifier for the bitmap to store
     * @param bitmap The bitmap to store
     */
    public void addBitmapToDiskCache(String data, Bitmap bitmap) {
        if (data == null || bitmap == null)
            return;
        synchronized (mDiskCacheLock)
        {
            if (mDiskLruCache == null)
                return;
            final String key = hashKeyForDisk(data);
            OutputStream out = null;
            try {
                DiskLruCache.Snapshot snapshot = mDiskLruCache.get(key);
                if (snapshot == null) {
                    // Not yet on disk: write the compressed bitmap through an editor.
                    final DiskLruCache.Editor editor = mDiskLruCache.edit(key);
                    if (editor != null) {
                        out = editor.newOutputStream(DISK_CACHE_INDEX);
                        bitmap.compress(
                                mCacheParams.compressFormat, mCacheParams.compressQuality, out);
                        editor.commit();
                        out.close();
                    }
                } else {
                    // Already cached; release the snapshot's stream.
                    snapshot.getInputStream(DISK_CACHE_INDEX).close();
                }
            } catch (final IOException e) {
                Log.e(TAG, "addBitmapToCache - " + e);
            } catch (Exception e) {
                Log.e(TAG, "addBitmapToCache - " + e);
            } finally {
                try {
                    if (out != null) {
                        out.close(); // no-op if already closed above
                    }
                } catch (IOException e) {}
            }
        }
    }

    /**
     * Get from memory cache.
     *
     * @param data Unique identifier for which item to get
     * @return The bitmap if found in cache, null otherwise
     */
    public Bitmap getBitmapFromMemCache(String data) {
        if (mMemoryCache != null) {
            final Bitmap memBitmap = mMemoryCache.get(data);
            if (memBitmap != null) {
                if (BuildConfig.DEBUG) {
                    Log.d(TAG, "Memory cache hit: " + data);
                }
                return memBitmap;
            }
        }
        return null;
    }

    /**
     * Get from disk cache. Blocks until the disk cache has finished initializing,
     * so this must not be called on the main/UI thread.
     *
     * @param data Unique identifier for which item to get
     * @return The bitmap if found in cache, null otherwise
     */
    public Bitmap getBitmapFromDiskCache(String data) {
        final String key = hashKeyForDisk(data);
        synchronized (mDiskCacheLock) {
            // Wait for initDiskCache() to finish on its background thread.
            while (mDiskCacheStarting) {
                try {
                    mDiskCacheLock.wait();
                } catch (InterruptedException e) {}
            }
            if (mDiskLruCache != null) {
                InputStream inputStream = null;
                try {
                    final DiskLruCache.Snapshot snapshot = mDiskLruCache.get(key);
                    if (snapshot != null) {
                        if (BuildConfig.DEBUG) {
                            Log.d(TAG, "Disk cache hit: data=" + data);
                            Log.d(TAG, "Disk cache hit: key=" + key);
                        }
                        inputStream = snapshot.getInputStream(DISK_CACHE_INDEX);
                        if (inputStream != null) {
                            final Bitmap bitmap = BitmapFactory.decodeStream(inputStream);
                            return bitmap;
                        }
                    }
                } catch (final IOException e) {
                    Log.e(TAG, "getBitmapFromDiskCache - " + e);
                } finally {
                    try {
                        if (inputStream != null) {
                            inputStream.close();
                        }
                    } catch (IOException e) {}
                }
            }
            return null;
        }
    }

    /**
     * Clears both the memory and disk cache associated with this ImageCache object. Note that
     * this includes disk access so this should not be executed on the main/UI thread.
     */
    public void clearCache()
    {
        clearMemCache();
        clearDiskCache();
    }

    /** Evicts every entry from the in-memory cache (no disk access). */
    public void clearMemCache()
    {
        if (mMemoryCache != null) {
            mMemoryCache.evictAll();
            if (BuildConfig.DEBUG) {
                Log.d(TAG, "Memory cache cleared");
            }
        }
    }

    /**
     * Clears the disk cache associated with this ImageCache object. Note that this
     * includes disk access so this should not be executed on the main/UI thread.
     */
    public void clearDiskCache()
    {
        synchronized (mDiskCacheLock) {
            // Flag the cache as (re)initializing so readers block until reopen completes.
            mDiskCacheStarting = true;
            if (mDiskLruCache != null && !mDiskLruCache.isClosed()) {
                try {
                    mDiskLruCache.delete();
                    if (BuildConfig.DEBUG) {
                        Log.d(TAG, "Disk cache cleared");
                    }
                } catch (IOException e) {
                    Log.e(TAG, "clearCache - " + e);
                }
                mDiskLruCache = null;
                // Reopen an empty disk cache right away.
                initDiskCache();
            }
        }
    }

    /**
     * Flushes the disk cache associated with this ImageCache object. Note that this includes
     * disk access so this should not be executed on the main/UI thread.
     */
    public void flush() {
        synchronized (mDiskCacheLock) {
            if (mDiskLruCache != null) {
                try {
                    mDiskLruCache.flush();
                    if (BuildConfig.DEBUG) {
                        Log.d(TAG, "Disk cache flushed");
                    }
                } catch (IOException e) {
                    Log.e(TAG, "flush - " + e);
                }
            }
        }
    }

    /**
     * Closes the disk cache associated with this ImageCache object. Note that this includes
     * disk access so this should not be executed on the main/UI thread.
     */
    public void close() {
        synchronized (mDiskCacheLock) {
            if (mDiskLruCache != null) {
                try {
                    if (!mDiskLruCache.isClosed()) {
                        mDiskLruCache.close();
                        mDiskLruCache = null;
                        if (BuildConfig.DEBUG) {
                            Log.d(TAG, "Disk cache closed");
                        }
                    }
                } catch (IOException e) {
                    Log.e(TAG, "close - " + e);
                }
            }
        }
    }

    /** @return the underlying disk cache, or null if it is not (yet) initialized. */
    public DiskLruCache getDiskLruCache() {
        return mDiskLruCache;
    }

    /**
     * A holder class that contains cache parameters.
     */
    public static class ImageCacheParams {
        public int memCacheSize = DEFAULT_MEM_CACHE_SIZE;
        public int diskCacheSize = DEFAULT_DISK_CACHE_SIZE;
        public File diskCacheDir;
        public CompressFormat compressFormat = DEFAULT_COMPRESS_FORMAT;
        public int compressQuality = DEFAULT_COMPRESS_QUALITY;
        public boolean memoryCacheEnabled = DEFAULT_MEM_CACHE_ENABLED;
        public boolean diskCacheEnabled = DEFAULT_DISK_CACHE_ENABLED;
        public boolean clearDiskCacheOnStart = DEFAULT_CLEAR_DISK_CACHE_ON_START;
        public boolean initDiskCacheOnCreate = DEFAULT_INIT_DISK_CACHE_ON_CREATE;

        public ImageCacheParams(Context context, String uniqueName) {
            diskCacheDir = getDiskCacheDir(context, uniqueName);
        }

        public ImageCacheParams(File diskCacheDir) {
            this.diskCacheDir = diskCacheDir;
        }

        /**
         * Sets the memory cache size based on a percentage of the device memory class.
         * Eg. setting percent to 0.2 would set the memory cache to one fifth of the device memory
         * class. Throws {@link IllegalArgumentException} if percent is < 0.05 or > .8.
         *
         * This value should be chosen carefully based on a number of factors
         * Refer to the corresponding Android Training class for more discussion:
         * http://developer.android.com/training/displaying-bitmaps/
         *
         * @param context Context to use to fetch memory class
         * @param percent Percent of memory class to use to size memory cache
         */
        public void setMemCacheSizePercent(Context context, float percent) {
            if (percent < 0.05f || percent > 0.8f) {
                throw new IllegalArgumentException("setMemCacheSizePercent - percent must be "
                        + "between 0.05 and 0.8 (inclusive)");
            }
            memCacheSize = Math.round(percent * getMemoryClass(context) * 1024 * 1024);
        }

        private static int getMemoryClass(Context context) {
            return ((ActivityManager) context.getSystemService(
                    Context.ACTIVITY_SERVICE)).getMemoryClass();
        }
    }

    /**
     * Get a usable cache directory (external if available, internal otherwise).
     *
     * @param context The context to use
     * @param uniqueName A unique directory name to append to the cache dir
     * @return The cache dir
     */
    public static File getDiskCacheDir(Context context, String uniqueName) {
        // Check if media is mounted or storage is built-in, if so, try and use external cache dir
        // otherwise use internal cache dir.
        // FIX: getExternalCacheDir() can return null even when media appears mounted
        // (e.g. storage momentarily unavailable); fall back to the internal cache dir
        // instead of throwing a NullPointerException.
        File externalDir = null;
        if (Environment.MEDIA_MOUNTED.equals(Environment.getExternalStorageState())
                || !isExternalStorageRemovable()) {
            externalDir = getExternalCacheDir(context);
        }
        final String cachePath =
                externalDir != null ? externalDir.getPath() : context.getCacheDir().getPath();
        return new File(cachePath + File.separator + uniqueName);
    }

    /**
     * A hashing method that changes a string (like a URL) into a hash suitable for using as a
     * disk filename.
     */
    public static String hashKeyForDisk(String key) {
        String cacheKey;
        try {
            final MessageDigest mDigest = MessageDigest.getInstance("MD5");
            // FIX: encode with an explicit UTF-8 charset. The no-arg getBytes() uses the
            // platform default charset, which can vary between devices and would produce
            // different cache keys for the same non-ASCII input.
            mDigest.update(key.getBytes(Charset.forName("UTF-8")));
            cacheKey = bytesToHexString(mDigest.digest());
        } catch (NoSuchAlgorithmException e) {
            // MD5 is required on Android; fall back to the (weaker) String hash code.
            cacheKey = String.valueOf(key.hashCode());
        }
        return cacheKey;
    }

    /** Converts a byte array to its lowercase hexadecimal representation. */
    private static String bytesToHexString(byte[] bytes) {
        // http://stackoverflow.com/questions/332079
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < bytes.length; i++) {
            String hex = Integer.toHexString(0xFF & bytes[i]);
            if (hex.length() == 1) {
                sb.append('0'); // left-pad single hex digits
            }
            sb.append(hex);
        }
        return sb.toString();
    }

    /**
     * Get the size in bytes of a bitmap.
     * @param bitmap
     * @return size in bytes
     */
    @TargetApi(12)
    public static int getBitmapSize(Bitmap bitmap) {
        if (Utils.hasHoneycombMR1()) {
            return bitmap.getByteCount();
        }
        // Pre HC-MR1
        return bitmap.getRowBytes() * bitmap.getHeight();
    }

    /**
     * Check if external storage is built-in or removable.
     *
     * @return True if external storage is removable (like an SD card), false
     * otherwise.
     */
    @TargetApi(9)
    public static boolean isExternalStorageRemovable() {
        if (Utils.hasGingerbread()) {
            return Environment.isExternalStorageRemovable();
        }
        // Pre-Gingerbread external storage was always removable.
        return true;
    }

    /**
     * Get the external app cache directory.
     *
     * @param context The context to use
     * @return The external cache dir
     */
    @TargetApi(8)
    public static File getExternalCacheDir(Context context) {
        if (Utils.hasFroyo()) {
            return context.getExternalCacheDir();
        }
        // Before Froyo we need to construct the external cache dir ourselves
        final String cacheDir = "/Android/data/" + context.getPackageName() + "/cache/";
        return new File(Environment.getExternalStorageDirectory().getPath() + cacheDir);
    }

    /**
     * Check how much usable space is available at a given path.
     *
     * @param path The path to check
     * @return The space available in bytes
     */
    @TargetApi(9)
    public static long getUsableSpace(File path) {
        if (Utils.hasGingerbread()) {
            return path.getUsableSpace();
        }
        // Pre-Gingerbread: compute free space from the filesystem stats.
        final StatFs stats = new StatFs(path.getPath());
        return (long) stats.getBlockSize() * (long) stats.getAvailableBlocks();
    }
}
| |
/*
* Copyright 2008 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.zxing.oned;
import com.google.zxing.BarcodeFormat;
import com.google.zxing.ChecksumException;
import com.google.zxing.DecodeHintType;
import com.google.zxing.FormatException;
import com.google.zxing.NotFoundException;
import com.google.zxing.ReaderException;
import com.google.zxing.Result;
import com.google.zxing.ResultMetadataType;
import com.google.zxing.ResultPoint;
import com.google.zxing.ResultPointCallback;
import com.google.zxing.common.BitArray;
import java.util.Arrays;
import java.util.Map;
/**
* <p>Encapsulates functionality and implementation that is common to UPC and EAN families
* of one-dimensional barcodes.</p>
*
* @author dswitkin@google.com (Daniel Switkin)
* @author Sean Owen
* @author alasdair@google.com (Alasdair Mackintosh)
*/
public abstract class UPCEANReader extends OneDReader {
    /**
     * Start/end guard pattern.
     */
    static final int[] START_END_PATTERN = {1, 1, 1,};

    /**
     * Pattern marking the middle of a UPC/EAN pattern, separating the two halves.
     */
    static final int[] MIDDLE_PATTERN = {1, 1, 1, 1, 1};

    /**
     * "Odd", or "L" patterns used to encode UPC/EAN digits.
     */
    static final int[][] L_PATTERNS = {
        {3, 2, 1, 1}, // 0
        {2, 2, 2, 1}, // 1
        {2, 1, 2, 2}, // 2
        {1, 4, 1, 1}, // 3
        {1, 1, 3, 2}, // 4
        {1, 2, 3, 1}, // 5
        {1, 1, 1, 4}, // 6
        {1, 3, 1, 2}, // 7
        {1, 2, 1, 3}, // 8
        {3, 1, 1, 2} // 9
    };

    /**
     * As above but also including the "even", or "G" patterns used to encode UPC/EAN digits.
     */
    static final int[][] L_AND_G_PATTERNS;

    // These two values are critical for determining how permissive the decoding will be.
    // We've arrived at these values through a lot of trial and error. Setting them any higher
    // lets false positives creep in quickly.
    private static final float MAX_AVG_VARIANCE = 0.48f;
    private static final float MAX_INDIVIDUAL_VARIANCE = 0.7f;

    static {
        // Entries 0-9 are the "L" patterns above; entries 10-19 are the "G" patterns,
        // which are the corresponding "L" patterns with their module widths reversed.
        L_AND_G_PATTERNS = new int[20][];
        System.arraycopy(L_PATTERNS, 0, L_AND_G_PATTERNS, 0, 10);
        for (int i = 10; i < 20; i++) {
            int[] widths = L_PATTERNS[i - 10];
            int[] reversedWidths = new int[widths.length];
            for (int j = 0; j < widths.length; j++) {
                reversedWidths[j] = widths[widths.length - j - 1];
            }
            L_AND_G_PATTERNS[i] = reversedWidths;
        }
    }

    // Reusable buffer for the digit characters decoded from each row.
    private final StringBuilder decodeRowStringBuffer;
    // Decoder for the optional supplemental extension following the main barcode.
    private final UPCEANExtensionSupport extensionReader;
    // Used to look up a possible country identifier from the decoded digits.
    private final EANManufacturerOrgSupport eanManSupport;

    protected UPCEANReader() {
        decodeRowStringBuffer = new StringBuilder(20);
        extensionReader = new UPCEANExtensionSupport();
        eanManSupport = new EANManufacturerOrgSupport();
    }

    /**
     * Locates the start guard pattern in a row, accepting it only when a quiet zone
     * (an all-white run at least as wide as the guard itself) precedes it.
     *
     * @param row row of black/white values to search
     * @return start/end horizontal offsets of the start guard pattern
     * @throws NotFoundException if no start guard with a valid quiet zone is found
     */
    static int[] findStartGuardPattern(BitArray row) throws NotFoundException {
        boolean foundStart = false;
        int[] startRange = null;
        int nextStart = 0;
        int[] counters = new int[START_END_PATTERN.length];
        while (!foundStart) {
            // Reset run-length counters before each candidate search.
            Arrays.fill(counters, 0, START_END_PATTERN.length, 0);
            startRange = findGuardPattern(row, nextStart, false, START_END_PATTERN, counters);
            int start = startRange[0];
            nextStart = startRange[1];
            // Make sure there is a quiet zone at least as big as the start pattern before the barcode.
            // If this check would run off the left edge of the image, do not accept this barcode,
            // as it is very likely to be a false positive.
            int quietStart = start - (nextStart - start);
            if (quietStart >= 0) {
                foundStart = row.isRange(quietStart, start, false);
            }
        }
        return startRange;
    }

    /**
     * Computes the UPC/EAN checksum on a string of digits, and reports
     * whether the checksum is correct or not.
     *
     * @param s string of digits to check
     * @return true iff string of digits passes the UPC/EAN checksum algorithm
     * @throws FormatException if the string does not contain only digits
     */
    static boolean checkStandardUPCEANChecksum(CharSequence s) throws FormatException {
        int length = s.length();
        if (length == 0) {
            return false;
        }
        int sum = 0;
        // Digits at positions length-2, length-4, ... carry weight 3 in the checksum.
        for (int i = length - 2; i >= 0; i -= 2) {
            int digit = (int) s.charAt(i) - (int) '0';
            if (digit < 0 || digit > 9) {
                throw FormatException.getFormatInstance();
            }
            sum += digit;
        }
        sum *= 3;
        // The remaining digits (including the trailing check digit) carry weight 1.
        for (int i = length - 1; i >= 0; i -= 2) {
            int digit = (int) s.charAt(i) - (int) '0';
            if (digit < 0 || digit > 9) {
                throw FormatException.getFormatInstance();
            }
            sum += digit;
        }
        // Valid iff the weighted digit sum is a multiple of 10.
        return sum % 10 == 0;
    }

    // Convenience overload that allocates a fresh counters array.
    static int[] findGuardPattern(BitArray row,
                                  int rowOffset,
                                  boolean whiteFirst,
                                  int[] pattern) throws NotFoundException {
        return findGuardPattern(row, rowOffset, whiteFirst, pattern, new int[pattern.length]);
    }

    /**
     * @param row row of black/white values to search
     * @param rowOffset position to start search
     * @param whiteFirst if true, indicates that the pattern specifies white/black/white/...
     * pixel counts, otherwise, it is interpreted as black/white/black/...
     * @param pattern pattern of counts of number of black and white pixels that are being
     * searched for as a pattern
     * @param counters array of counters, as long as pattern, to re-use
     * @return start/end horizontal offset of guard pattern, as an array of two ints
     * @throws NotFoundException if pattern is not found
     */
    private static int[] findGuardPattern(BitArray row,
                                          int rowOffset,
                                          boolean whiteFirst,
                                          int[] pattern,
                                          int[] counters) throws NotFoundException {
        int patternLength = pattern.length;
        int width = row.getSize();
        boolean isWhite = whiteFirst;
        // Skip ahead to the first pixel of the expected initial color.
        rowOffset = whiteFirst ? row.getNextUnset(rowOffset) : row.getNextSet(rowOffset);
        int counterPosition = 0;
        int patternStart = rowOffset;
        for (int x = rowOffset; x < width; x++) {
            if (row.get(x) ^ isWhite) {
                // Still inside the current bar/space run; extend its count.
                counters[counterPosition]++;
            } else {
                if (counterPosition == patternLength - 1) {
                    if (patternMatchVariance(counters, pattern, MAX_INDIVIDUAL_VARIANCE) < MAX_AVG_VARIANCE) {
                        return new int[]{patternStart, x};
                    }
                    // No match: slide the window forward by one bar/space pair,
                    // shifting the remaining run counts down.
                    patternStart += counters[0] + counters[1];
                    System.arraycopy(counters, 2, counters, 0, patternLength - 2);
                    counters[patternLength - 2] = 0;
                    counters[patternLength - 1] = 0;
                    counterPosition--;
                } else {
                    counterPosition++;
                }
                counters[counterPosition] = 1;
                isWhite = !isWhite;
            }
        }
        throw NotFoundException.getNotFoundInstance();
    }

    /**
     * Attempts to decode a single UPC/EAN-encoded digit.
     *
     * @param row row of black/white values to decode
     * @param counters the counts of runs of observed black/white/black/... values
     * @param rowOffset horizontal offset to start decoding from
     * @param patterns the set of patterns to use to decode -- sometimes different encodings
     * for the digits 0-9 are used, and this indicates the encodings for 0 to 9 that should
     * be used
     * @return horizontal offset of first pixel beyond the decoded digit
     * @throws NotFoundException if digit cannot be decoded
     */
    static int decodeDigit(BitArray row, int[] counters, int rowOffset, int[][] patterns)
            throws NotFoundException {
        recordPattern(row, rowOffset, counters);
        float bestVariance = MAX_AVG_VARIANCE; // worst variance we'll accept
        int bestMatch = -1;
        int max = patterns.length;
        // Pick the candidate pattern with the lowest variance against the observed runs.
        for (int i = 0; i < max; i++) {
            int[] pattern = patterns[i];
            float variance = patternMatchVariance(counters, pattern, MAX_INDIVIDUAL_VARIANCE);
            if (variance < bestVariance) {
                bestVariance = variance;
                bestMatch = i;
            }
        }
        if (bestMatch >= 0) {
            return bestMatch;
        } else {
            throw NotFoundException.getNotFoundInstance();
        }
    }

    @Override
    public Result decodeRow(int rowNumber, BitArray row, Map<DecodeHintType, ?> hints)
            throws NotFoundException, ChecksumException, FormatException {
        // Find the start guard ourselves, then delegate to the 4-arg overload.
        return decodeRow(rowNumber, row, findStartGuardPattern(row), hints);
    }

    /**
     * <p>Like {@link #decodeRow(int, BitArray, java.util.Map)}, but
     * allows caller to inform method about where the UPC/EAN start pattern is
     * found. This allows this to be computed once and reused across many implementations.</p>
     *
     * @param rowNumber row index into the image
     * @param row encoding of the row of the barcode image
     * @param startGuardRange start/end column where the opening start pattern was found
     * @param hints optional hints that influence decoding
     * @return {@link Result} encapsulating the result of decoding a barcode in the row
     * @throws NotFoundException if no potential barcode is found
     * @throws ChecksumException if a potential barcode is found but does not pass its checksum
     * @throws FormatException if a potential barcode is found but format is invalid
     */
    public Result decodeRow(int rowNumber,
                            BitArray row,
                            int[] startGuardRange,
                            Map<DecodeHintType, ?> hints)
            throws NotFoundException, ChecksumException, FormatException {
        ResultPointCallback resultPointCallback = hints == null ? null :
                (ResultPointCallback) hints.get(DecodeHintType.NEED_RESULT_POINT_CALLBACK);
        if (resultPointCallback != null) {
            // Report the midpoint of the start guard as a possible result point.
            resultPointCallback.foundPossibleResultPoint(new ResultPoint(
                    (startGuardRange[0] + startGuardRange[1]) / 2.0f, rowNumber
            ));
        }
        StringBuilder result = decodeRowStringBuffer;
        result.setLength(0);
        // Subclass-specific decoding of the digits between start and end guards.
        int endStart = decodeMiddle(row, startGuardRange, result);
        if (resultPointCallback != null) {
            resultPointCallback.foundPossibleResultPoint(new ResultPoint(
                    endStart, rowNumber
            ));
        }
        int[] endRange = decodeEnd(row, endStart);
        if (resultPointCallback != null) {
            resultPointCallback.foundPossibleResultPoint(new ResultPoint(
                    (endRange[0] + endRange[1]) / 2.0f, rowNumber
            ));
        }
        // Make sure there is a quiet zone at least as big as the end pattern after the barcode. The
        // spec might want more whitespace, but in practice this is the maximum we can count on.
        int end = endRange[1];
        int quietEnd = end + (end - endRange[0]);
        if (quietEnd >= row.getSize() || !row.isRange(end, quietEnd, false)) {
            throw NotFoundException.getNotFoundInstance();
        }
        String resultString = result.toString();
        // UPC/EAN should never be less than 8 chars anyway
        if (resultString.length() < 8) {
            throw FormatException.getFormatInstance();
        }
        if (!checkChecksum(resultString)) {
            throw ChecksumException.getChecksumInstance();
        }
        float left = (float) (startGuardRange[1] + startGuardRange[0]) / 2.0f;
        float right = (float) (endRange[1] + endRange[0]) / 2.0f;
        BarcodeFormat format = getBarcodeFormat();
        Result decodeResult = new Result(resultString,
                null, // no natural byte representation for these barcodes
                new ResultPoint[]{
                        new ResultPoint(left, (float) rowNumber),
                        new ResultPoint(right, (float) rowNumber)},
                format);
        int extensionLength = 0;
        try {
            // Optionally decode a supplemental extension just past the end guard.
            Result extensionResult = extensionReader.decodeRow(rowNumber, row, endRange[1]);
            decodeResult.putMetadata(ResultMetadataType.UPC_EAN_EXTENSION, extensionResult.getText());
            decodeResult.putAllMetadata(extensionResult.getResultMetadata());
            decodeResult.addResultPoints(extensionResult.getResultPoints());
            extensionLength = extensionResult.getText().length();
        } catch (ReaderException re) {
            // continue -- the extension is optional, so its absence is not an error
        }
        int[] allowedExtensions =
                hints == null ? null : (int[]) hints.get(DecodeHintType.ALLOWED_EAN_EXTENSIONS);
        if (allowedExtensions != null) {
            // The caller restricted acceptable extension lengths; reject anything else.
            boolean valid = false;
            for (int length : allowedExtensions) {
                if (extensionLength == length) {
                    valid = true;
                    break;
                }
            }
            if (!valid) {
                throw NotFoundException.getNotFoundInstance();
            }
        }
        if (format == BarcodeFormat.EAN_13 || format == BarcodeFormat.UPC_A) {
            String countryID = eanManSupport.lookupCountryIdentifier(resultString);
            if (countryID != null) {
                decodeResult.putMetadata(ResultMetadataType.POSSIBLE_COUNTRY, countryID);
            }
        }
        return decodeResult;
    }

    /**
     * @param s string of digits to check
     * @return {@link #checkStandardUPCEANChecksum(CharSequence)}
     * @throws FormatException if the string does not contain only digits
     */
    boolean checkChecksum(String s) throws FormatException {
        return checkStandardUPCEANChecksum(s);
    }

    // Locates the end guard pattern starting at the given offset.
    int[] decodeEnd(BitArray row, int endStart) throws NotFoundException {
        return findGuardPattern(row, endStart, false, START_END_PATTERN);
    }

    /**
     * Get the format of this decoder.
     *
     * @return The 1D format.
     */
    abstract BarcodeFormat getBarcodeFormat();

    /**
     * Subclasses override this to decode the portion of a barcode between the start
     * and end guard patterns.
     *
     * @param row row of black/white values to search
     * @param startRange start/end offset of start guard pattern
     * @param resultString {@link StringBuilder} to append decoded chars to
     * @return horizontal offset of first pixel after the "middle" that was decoded
     * @throws NotFoundException if decoding could not complete successfully
     */
    protected abstract int decodeMiddle(BitArray row,
                                        int[] startRange,
                                        StringBuilder resultString) throws NotFoundException;
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.sdk.io.hadoop.format;
import static org.hamcrest.Matchers.equalTo;
import java.io.File;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.beam.examples.WordCount;
import org.apache.beam.sdk.coders.StringUtf8Coder;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.testing.TestStream;
import org.apache.beam.sdk.transforms.Create;
import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.SerializableFunction;
import org.apache.beam.sdk.transforms.View;
import org.apache.beam.sdk.transforms.windowing.FixedWindows;
import org.apache.beam.sdk.transforms.windowing.Window;
import org.apache.beam.sdk.values.KV;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.PCollectionView;
import org.apache.beam.sdk.values.TimestampedValue;
import org.apache.beam.sdk.values.TypeDescriptor;
import org.apache.beam.sdk.values.TypeDescriptors;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.OutputFormat;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileRecordReader;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
import org.hamcrest.MatcherAssert;
import org.joda.time.Duration;
import org.joda.time.Instant;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/** Tests {@link HadoopFormatIO} output with batch and stream pipeline. */
@RunWith(JUnit4.class)
public class HadoopFormatIOSequenceFileTest {
private static final Instant START_TIME = new Instant(0);
private static final String TEST_FOLDER_NAME = "test";
private static final String LOCKS_FOLDER_NAME = "locks";
private static final int REDUCERS_COUNT = 2;
private static final List<String> SENTENCES =
Arrays.asList(
"Hello world this is first streamed event",
"Hello again this is sedcond streamed event",
"Third time Hello event created",
"And last event will was sent now",
"Hello from second window",
"First event from second window");
private static final List<String> FIRST_WIN_WORDS = SENTENCES.subList(0, 4);
private static final List<String> SECOND_WIN_WORDS = SENTENCES.subList(4, 6);
private static final Duration WINDOW_DURATION = Duration.standardMinutes(1);
private static final SerializableFunction<KV<String, Long>, KV<Text, LongWritable>>
KV_STR_INT_2_TXT_LONGWRITABLE =
(KV<String, Long> element) ->
KV.of(new Text(element.getKey()), new LongWritable(element.getValue()));
/**
 * Locally computes the expected word counts for the given sentences: words are
 * split on non-word characters, lowercased, and tallied.
 */
private static Map<String, Long> computeWordCounts(List<String> sentences) {
    return sentences.stream()
        .map(String::toLowerCase)
        .flatMap(sentence -> Arrays.stream(sentence.split("\\W+")))
        .collect(Collectors.toMap(word -> word, word -> 1L, Long::sum));
}
@Rule public TemporaryFolder tmpFolder = new TemporaryFolder();
@Rule public TestPipeline pipeline = TestPipeline.create();
@Test
public void batchTest() {
    // Write word counts through the Hadoop OutputFormat with partitioning enabled.
    String outputDirPath = getOutputDirPath("batchTest");
    Configuration hadoopConf =
        createWriteConf(
            SequenceFileOutputFormat.class,
            Text.class,
            LongWritable.class,
            outputDirPath,
            REDUCERS_COUNT,
            "0");
    HadoopFormatIO.Write<Text, LongWritable> write =
        HadoopFormatIO.<Text, LongWritable>write()
            .withConfiguration(hadoopConf)
            .withPartitioning()
            .withExternalSynchronization(new HDFSSynchronization(getLocksDirPath()));
    executeBatchTest(write, outputDirPath);
    // The synchronization locks must all be released once the pipeline finishes.
    Assert.assertEquals(
        "In lock folder shouldn't be any file", 0, new File(getLocksDirPath()).list().length);
}
@Test
public void batchTestWithoutPartitioner() {
String outputDir = getOutputDirPath("batchTestWithoutPartitioner");
Configuration conf =
createWriteConf(
SequenceFileOutputFormat.class,
Text.class,
LongWritable.class,
outputDir,
REDUCERS_COUNT,
"0");
executeBatchTest(
HadoopFormatIO.<Text, LongWritable>write()
.withConfiguration(conf)
.withoutPartitioning()
.withExternalSynchronization(new HDFSSynchronization(getLocksDirPath())),
outputDir);
Assert.assertEquals(
"In lock folder shouldn't be any file", 0, new File(getLocksDirPath()).list().length);
}
private void executeBatchTest(HadoopFormatIO.Write<Text, LongWritable> write, String outputDir) {
pipeline
.apply(Create.of(SENTENCES))
.apply(ParDo.of(new ConvertToLowerCaseFn()))
.apply(new WordCount.CountWords())
.apply(
"ConvertToHadoopFormat",
ParDo.of(new ConvertToHadoopFormatFn<>(KV_STR_INT_2_TXT_LONGWRITABLE)))
.setTypeDescriptor(
TypeDescriptors.kvs(
new TypeDescriptor<Text>() {}, new TypeDescriptor<LongWritable>() {}))
.apply(write);
pipeline.run();
Map<String, Long> results = loadWrittenDataAsMap(outputDir);
MatcherAssert.assertThat(results.entrySet(), equalTo(computeWordCounts(SENTENCES).entrySet()));
}
private List<KV<Text, LongWritable>> loadWrittenData(String outputDir) {
return Arrays.stream(Objects.requireNonNull(new File(outputDir).list()))
.filter(fileName -> fileName.startsWith("part-r"))
.map(fileName -> outputDir + File.separator + fileName)
.flatMap(this::extractResultsFromFile)
.collect(Collectors.toList());
}
private String getOutputDirPath(String testName) {
return Paths.get(tmpFolder.getRoot().getAbsolutePath(), TEST_FOLDER_NAME + "/" + testName)
.toAbsolutePath()
.toString();
}
private String getLocksDirPath() {
return Paths.get(tmpFolder.getRoot().getAbsolutePath(), LOCKS_FOLDER_NAME)
.toAbsolutePath()
.toString();
}
private Stream<KV<Text, LongWritable>> extractResultsFromFile(String fileName) {
try (SequenceFileRecordReader<Text, LongWritable> reader = new SequenceFileRecordReader<>()) {
Path path = new Path(fileName);
TaskAttemptContext taskContext =
HadoopFormats.createTaskAttemptContext(new Configuration(), new JobID("readJob", 0), 0);
reader.initialize(
new FileSplit(path, 0L, Long.MAX_VALUE, new String[] {"localhost"}), taskContext);
List<KV<Text, LongWritable>> result = new ArrayList<>();
while (reader.nextKeyValue()) {
result.add(
KV.of(
new Text(reader.getCurrentKey().toString()),
new LongWritable(reader.getCurrentValue().get())));
}
return result.stream();
} catch (Exception e) {
throw new RuntimeException(e);
}
}
private static Configuration createWriteConf(
Class<?> outputFormatClass,
Class<?> keyClass,
Class<?> valueClass,
String path,
Integer reducersCount,
String jobId) {
return getConfiguration(outputFormatClass, keyClass, valueClass, path, reducersCount, jobId);
}
private static Configuration getConfiguration(
Class<?> outputFormatClass,
Class<?> keyClass,
Class<?> valueClass,
String path,
Integer reducersCount,
String jobId) {
Configuration conf = new Configuration();
conf.setClass(HadoopFormatIO.OUTPUT_FORMAT_CLASS_ATTR, outputFormatClass, OutputFormat.class);
conf.setClass(HadoopFormatIO.OUTPUT_KEY_CLASS, keyClass, Object.class);
conf.setClass(HadoopFormatIO.OUTPUT_VALUE_CLASS, valueClass, Object.class);
conf.setInt(HadoopFormatIO.NUM_REDUCES, reducersCount);
conf.set(HadoopFormatIO.OUTPUT_DIR, path);
conf.set(HadoopFormatIO.JOB_ID, jobId);
return conf;
}
@Test
public void streamTest() {
TestStream<String> stringsStream =
TestStream.create(StringUtf8Coder.of())
.advanceWatermarkTo(START_TIME)
.addElements(event(FIRST_WIN_WORDS.get(0), 2L))
.advanceWatermarkTo(START_TIME.plus(Duration.standardSeconds(27L)))
.addElements(
event(FIRST_WIN_WORDS.get(1), 25L),
event(FIRST_WIN_WORDS.get(2), 18L),
event(FIRST_WIN_WORDS.get(3), 28L))
.advanceWatermarkTo(START_TIME.plus(Duration.standardSeconds(65L)))
.addElements(event(SECOND_WIN_WORDS.get(0), 61L), event(SECOND_WIN_WORDS.get(1), 63L))
.advanceWatermarkToInfinity();
String outputDirPath = getOutputDirPath("streamTest");
PCollection<KV<Text, LongWritable>> dataToWrite =
pipeline
.apply(stringsStream)
.apply(Window.into(FixedWindows.of(WINDOW_DURATION)))
.apply(ParDo.of(new ConvertToLowerCaseFn()))
.apply(new WordCount.CountWords())
.apply(
"ConvertToHadoopFormat",
ParDo.of(new ConvertToHadoopFormatFn<>(KV_STR_INT_2_TXT_LONGWRITABLE)))
.setTypeDescriptor(
TypeDescriptors.kvs(
new TypeDescriptor<Text>() {}, new TypeDescriptor<LongWritable>() {}));
ConfigTransform<Text, LongWritable> configurationTransformation =
new ConfigTransform<>(outputDirPath, Text.class, LongWritable.class);
dataToWrite.apply(
HadoopFormatIO.<Text, LongWritable>write()
.withConfigurationTransform(configurationTransformation)
.withExternalSynchronization(new HDFSSynchronization(getLocksDirPath())));
pipeline.run();
Map<String, Long> values = loadWrittenDataAsMap(outputDirPath);
MatcherAssert.assertThat(
values.entrySet(), equalTo(computeWordCounts(FIRST_WIN_WORDS).entrySet()));
Assert.assertEquals(
"In lock folder shouldn't be any file", 0, new File(getLocksDirPath()).list().length);
}
private Map<String, Long> loadWrittenDataAsMap(String outputDirPath) {
return loadWrittenData(outputDirPath).stream()
.collect(
Collectors.toMap(kv -> kv.getKey().toString(), kv -> kv.getValue().get(), Long::sum));
}
private <T> TimestampedValue<T> event(T eventValue, Long timestamp) {
return TimestampedValue.of(eventValue, START_TIME.plus(new Duration(timestamp)));
}
private static class ConvertToHadoopFormatFn<InputT, OutputT> extends DoFn<InputT, OutputT> {
private final SerializableFunction<InputT, OutputT> transformFn;
ConvertToHadoopFormatFn(SerializableFunction<InputT, OutputT> transformFn) {
this.transformFn = transformFn;
}
@DoFn.ProcessElement
public void processElement(@DoFn.Element InputT element, OutputReceiver<OutputT> outReceiver) {
outReceiver.output(transformFn.apply(element));
}
}
private static class ConvertToLowerCaseFn extends DoFn<String, String> {
@DoFn.ProcessElement
public void processElement(@DoFn.Element String element, OutputReceiver<String> receiver) {
receiver.output(element.toLowerCase());
}
}
private static class ConfigTransform<KeyT, ValueT>
extends PTransform<PCollection<? extends KV<KeyT, ValueT>>, PCollectionView<Configuration>> {
private final String outputDirPath;
private final Class<?> keyClass;
private final Class<?> valueClass;
private int windowNum = 0;
private ConfigTransform(String outputDirPath, Class<?> keyClass, Class<?> valueClass) {
this.outputDirPath = outputDirPath;
this.keyClass = keyClass;
this.valueClass = valueClass;
}
@Override
public PCollectionView<Configuration> expand(PCollection<? extends KV<KeyT, ValueT>> input) {
Configuration conf =
createWriteConf(
SequenceFileOutputFormat.class,
keyClass,
valueClass,
outputDirPath,
REDUCERS_COUNT,
String.valueOf(windowNum++));
return input
.getPipeline()
.apply(Create.<Configuration>of(conf))
.apply(View.<Configuration>asSingleton().withDefaultValue(conf));
}
}
}
| |
package com.uphold.uphold_android_sdk.model;
import com.darylteo.rx.promises.java.Promise;
import com.darylteo.rx.promises.java.functions.PromiseFunction;
import com.uphold.uphold_android_sdk.client.retrofitpromise.RetrofitPaginatorPromise;
import com.uphold.uphold_android_sdk.client.retrofitpromise.RetrofitPromise;
import com.uphold.uphold_android_sdk.model.card.Address;
import com.uphold.uphold_android_sdk.model.card.AddressRequest;
import com.uphold.uphold_android_sdk.model.card.Normalized;
import com.uphold.uphold_android_sdk.model.card.Settings;
import com.uphold.uphold_android_sdk.model.transaction.TransactionRequest;
import com.uphold.uphold_android_sdk.paginator.Paginator;
import com.uphold.uphold_android_sdk.paginator.PaginatorInterface;
import com.uphold.uphold_android_sdk.service.UserCardService;
import com.uphold.uphold_android_sdk.util.Header;
import java.io.Serializable;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Card model.
 */
public class Card extends BaseModel implements Serializable {

    /** The id of the card. */
    private final String id;
    /** The address of the card. */
    private final Map<String, String> address;
    /** The balance available for withdrawal/usage. */
    private final String available;
    /** The total balance of the card, including all pending transactions. */
    private final String balance;
    /** The currency of the card. */
    private final String currency;
    /** The display name of the card as chosen by the user. */
    private final String label;
    /** A timestamp of the last time a transaction on this card was conducted. */
    private final String lastTransactionAt;
    /** The list with the normalized fields. */
    private final List<Normalized> normalized;
    /** The {@link Settings} of the card. */
    private final Settings settings;

    /**
     * Constructor.
     *
     * @param id The id of the card.
     * @param address The address of the card.
     * @param available The balance available for withdrawal/usage.
     * @param balance The total balance of the card, including all pending transactions.
     * @param currency The currency of the card.
     * @param label The display name of the card as chosen by the user.
     * @param lastTransactionAt A timestamp of the last time a transaction on this card was conducted.
     * @param normalized The list with the normalized fields.
     * @param settings The {@link Settings} of the card.
     */
    public Card(String id, Map<String, String> address, String available, String balance, String currency, String label, String lastTransactionAt, List<Normalized> normalized, Settings settings) {
        this.id = id;
        this.address = address;
        this.available = available;
        this.balance = balance;
        this.currency = currency;
        this.label = label;
        this.lastTransactionAt = lastTransactionAt;
        this.normalized = normalized;
        this.settings = settings;
    }

    /**
     * Creates a card address.
     *
     * @param addressRequest The {@link AddressRequest} with the name of the address to be created.
     *
     * @return a {@link Promise<Address>} with the created address.
     */
    public Promise<Address> createAddress(AddressRequest addressRequest) {
        RetrofitPromise<Address> promise = new RetrofitPromise<>();
        UserCardService userCardService = this.getUpholdRestAdapter().create(UserCardService.class);

        userCardService.createAddress(this.getId(), addressRequest, promise);

        return promise;
    }

    /**
     * Creates a transaction without committing it.
     *
     * @param transactionRequest The {@link TransactionRequest} with the transaction request information.
     *
     * @return a {@link Promise<Transaction>} with the transaction.
     */
    public Promise<Transaction> createTransaction(TransactionRequest transactionRequest) {
        // Delegates to the two-argument overload; commit defaults to false.
        return createTransaction(transactionRequest, false);
    }

    /**
     * Creates a transaction.
     *
     * @param transactionRequest The {@link TransactionRequest} with the transaction request information.
     * @param commit A boolean to indicate if it is to commit the transaction on the creation process.
     *
     * @return a {@link Promise<Transaction>} with the transaction.
     */
    public Promise<Transaction> createTransaction(TransactionRequest transactionRequest, Boolean commit) {
        RetrofitPromise<Transaction> promise = new RetrofitPromise<>();
        UserCardService userCardService = this.getUpholdRestAdapter().create(UserCardService.class);

        userCardService.createTransaction(commit, this.getId(), transactionRequest, promise);

        return promise;
    }

    /**
     * Gets the card id.
     *
     * @return the card id
     */
    public String getId() {
        return id;
    }

    /**
     * Gets the address of the card.
     *
     * @return the address of the card
     */
    public Map<String, String> getAddress() {
        return address;
    }

    /**
     * Gets the balance available in the card for withdrawal/usage.
     *
     * @return the balance available in the card for withdrawal/usage
     */
    public String getAvailable() {
        return available;
    }

    /**
     * Gets the total balance of the card, including all pending transactions.
     *
     * @return the total balance of the card, including all pending transactions
     */
    public String getBalance() {
        return balance;
    }

    /**
     * Gets the currency of the card.
     *
     * @return the currency of the card
     */
    public String getCurrency() {
        return currency;
    }

    /**
     * Gets the label of the card.
     *
     * @return the label of the card
     */
    public String getLabel() {
        return label;
    }

    /**
     * Gets the timestamp of the last time a transaction on this card was conducted.
     *
     * @return timestamp of the last time a transaction on this card was conducted
     */
    public String getLastTransactionAt() {
        return lastTransactionAt;
    }

    /**
     * Gets the normalized fields.
     *
     * @return the normalized fields
     */
    public List<Normalized> getNormalized() {
        return normalized;
    }

    /**
     * Gets the card settings.
     *
     * @return the {@link Settings} of the card
     */
    public Settings getSettings() {
        return settings;
    }

    /**
     * Gets a {@link Paginator} with the transactions for the current card.
     *
     * @return a {@link Paginator<Transaction>} with the list of transactions for the current card.
     */
    public Paginator<Transaction> getTransactions() {
        // Eagerly request the first page; the paginator interface below serves the rest on demand.
        RetrofitPromise<List<Transaction>> promise = new RetrofitPromise<>();
        UserCardService userCardService = this.getUpholdRestAdapter().create(UserCardService.class);

        userCardService.getUserCardTransactions(Header.buildRangeHeader(Paginator.DEFAULT_START, Paginator.DEFAULT_OFFSET - 1), this.getId(), promise);

        PaginatorInterface<Transaction> paginatorInterface = new PaginatorInterface<Transaction>() {
            @Override
            public Promise<Integer> count() {
                // A minimal range request (0-1) is enough: the total is read from the response headers.
                RetrofitPaginatorPromise<Transaction> promise = new RetrofitPaginatorPromise<>();
                UserCardService userCardService = Card.this.getUpholdRestAdapter().create(UserCardService.class);

                userCardService.getUserCardTransactions(Header.buildRangeHeader(0, 1), Card.this.getId(), promise);

                return promise.then(new PromiseFunction<ResponseModel, Integer>() {
                    public Integer call(ResponseModel listPaginatorModel) {
                        return Header.getTotalNumberOfResults(listPaginatorModel.getResponse().getHeaders());
                    }
                });
            }

            @Override
            public Promise<List<Transaction>> getNext(String range) {
                RetrofitPromise<List<Transaction>> promise = new RetrofitPromise<>();
                UserCardService userCardService = Card.this.getUpholdRestAdapter().create(UserCardService.class);

                userCardService.getUserCardTransactions(range, Card.this.getId(), promise);

                return promise;
            }

            @Override
            public Promise<Boolean> hasNext(final Integer currentPage) {
                RetrofitPaginatorPromise<Transaction> promise = new RetrofitPaginatorPromise<>();
                UserCardService userCardService = Card.this.getUpholdRestAdapter().create(UserCardService.class);

                userCardService.getUserCardTransactions(Header.buildRangeHeader(0, 1), Card.this.getId(), promise);

                return promise.then(new PromiseFunction<ResponseModel, Boolean>() {
                    public Boolean call(ResponseModel listPaginatorModel) {
                        Integer totalNumberOfResults = Header.getTotalNumberOfResults(listPaginatorModel.getResponse().getHeaders());

                        // More pages remain while the pages consumed so far cover fewer results than the total.
                        return (currentPage * Paginator.DEFAULT_OFFSET) < totalNumberOfResults;
                    }
                });
            }
        };

        return new Paginator<>(promise, paginatorInterface);
    }

    /**
     * Update the card information.
     *
     * @param updateRequest The card fields to update.
     *
     * @return a {@link Promise<Card>} with the card updated.
     */
    public Promise<Card> update(HashMap<String, Object> updateRequest) {
        RetrofitPromise<Card> promise = new RetrofitPromise<>();
        UserCardService userCardService = this.getUpholdRestAdapter().create(UserCardService.class);

        userCardService.update(this.getId(), updateRequest, promise);

        return promise;
    }
}
| |
package edu.indiana.lib.twinpeaks.search.singlesearch;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import lombok.extern.slf4j.Slf4j;
import org.xml.sax.Attributes;
import org.xml.sax.SAXException;
@Slf4j
public class CqlParser extends org.xml.sax.helpers.DefaultHandler {
//
// Index mappings (CQL -> Sirsi)
//
private static final java.util.Map INDEX_MAP = new java.util.HashMap();
static {
INDEX_MAP.put("keyword", " ");
INDEX_MAP.put("title", ":TITLE");
INDEX_MAP.put("author", ":CREATOR");
INDEX_MAP.put("subject", ":SUBJECT");
INDEX_MAP.put("year", ":DATE");
}
//
// Boolean mappings (CQL -> Sirsi)
//
private static final java.util.Map BOOL_RELATION_MAP = new java.util.HashMap();
static
{
BOOL_RELATION_MAP.put("and", " AND ");
BOOL_RELATION_MAP.put("or", " OR ");
}
//
// SAX Parsing
//
SAXParser saxParser;
StringBuilder textBuffer;
StringBuilder searchClause;
boolean inSearchClause;
java.util.Stack cqlStack;
//
// Treat all non-keyword fields as phrases?
//
final static boolean TREAT_ALL_FIELDS_AS_PHRASE = true;
//
// Are we currently parsing a keyword field?
//
boolean inKeyword;
/**
* Constructor
*/
public CqlParser()
{
// initialize stack
cqlStack = new java.util.Stack();
// initialize SAX Parser
SAXParserFactory factory;
factory = SAXParserFactory.newInstance();
factory.setNamespaceAware(true);
try
{
saxParser = factory.newSAXParser();
}
catch (org.xml.sax.SAXException e)
{
log.error("SAX exception: " + e);
}
catch (ParserConfigurationException e)
{
log.error("Parse failed: " + e);
}
}
/**
* Converts a CQL-formatted search query into a format that the Web2 Bridge
* can understand. Uses org.z3950.zing.cql.CQLNode.toXCQL() and SAX Parsing
* to convert the cqlSearchQuery into a find_command.
*
* @param cqlSearchQuery CQL-formatted search query.
* @return X-Server find_command or null if cqlSearchQuery is null or empty.
* @see org.z3950.zing.cql.CQLNode.toXCQL()
*/
public String doCQL2MetasearchCommand( String cqlSearchQuery )
{
if ( cqlSearchQuery == null || cqlSearchQuery.equals( "" ) )
{
return null;
}
org.z3950.zing.cql.CQLParser parser = new org.z3950.zing.cql.CQLParser();
org.z3950.zing.cql.CQLNode root = null;
try
{
// parse the criteria
root = parser.parse( cqlSearchQuery );
}
catch( java.io.IOException ioe )
{
log.error("CQL parse exception: " + ioe);
}
catch( org.z3950.zing.cql.CQLParseException e )
{
log.error("CQL parse exception: " + e);
}
if (root == null)
{
return null;
}
String cqlXml = root.toXCQL( 0 );
log.debug("CQL XML:");
log.debug(cqlXml);
// get cqlXml as a stream
java.io.ByteArrayInputStream byteInputStream = null;
try
{
byteInputStream = new java.io.ByteArrayInputStream(cqlXml.getBytes( "UTF8" ));
}
catch( java.io.UnsupportedEncodingException uee )
{
log.error("Encoding exception: " + uee);
}
if (byteInputStream == null)
{
return null;
}
// clear the stack
cqlStack.removeAllElements();
// run the parser
try
{
saxParser.parse( byteInputStream, this );
byteInputStream.close();
}
catch( java.io.IOException ioe )
{
log.error("IO exception: " + ioe);
}
catch( org.xml.sax.SAXException spe )
{
log.error("SAX exception: " + spe);
}
String cqlResult = ( String ) cqlStack.pop();
return cqlResult.trim();
}
//----------------------------------
// DEFAULT HANDLER IMPLEMENTATIONS -
//----------------------------------
/**
* Receive notification of the beginning of an element.
*
* @see org.xml.sax.helpers.DefaultHandler
*/
public void startElement( String namespaceURI,
String sName,
String qName,
Attributes attrs ) throws SAXException
{
// set flags to avoid overwriting duplicate tag data
if( qName.equals( "searchClause" ) )
{
inSearchClause = true;
inKeyword = false;
}
}
/**
* Receive notification of the end of an element.
*
* @see org.xml.sax.helpers.DefaultHandler
*/
public void endElement( String namespaceURI, String sName, String qName )
throws SAXException
{
// extract data
extractDataFromText( qName );
// clear flags
if( qName.equals( "searchClause" ) )
{
inSearchClause = false;
inKeyword = false;
}
}
/**
* Receive notification of character data inside an element.
*
* @see org.xml.sax.helpers.DefaultHandler
*/
public void characters( char[] buf, int offset, int len )
throws SAXException
{
// store character data
String text = new String( buf, offset, len );
if( textBuffer == null ) {
textBuffer = new StringBuilder( text );
} else {
textBuffer.append( text );
}
}
//-------------------------
// PRIVATE HELPER METHODS -
//-------------------------
private void extractDataFromText(String element)
{
if (textBuffer == null)
{
return;
}
String text = textBuffer.toString().trim();
if (text.equals("") && !element.equals("triple"))
{
return;
}
//
// check for a boolean relation value
//
if (!inSearchClause && element.equals( "value" ))
{
cqlStack.push(text);
}
//
// Construct a search clause
//
if (inSearchClause)
{
if (searchClause == null)
{
searchClause = new StringBuilder();
}
//
// General syntax: title=macbeth
//
// (title is the index, = is the value, macbeth is the term)
//
if (element.equals("index"))
{
String field = translateIndex(text);
inKeyword = ((String) INDEX_MAP.get("keyword")).equals(field);
searchClause.append(field);
}
else if (element.equals("value"))
{
//
// The relation value is always supplied as '='. The Muse syntax employed
// by the Web2 bridge doesn't need it. Use a space instead.
//
searchClause.append(' ');
}
else if (element.equals("term"))
{
//
// Search term processing:
//
// * Honor '+' encoding for embedded spaces
// * Add double quotes
//
// Example:
// aa+bb+cc <becomes> "aa bb cc"
//
if (TREAT_ALL_FIELDS_AS_PHRASE || inKeyword)
{
searchClause.append('"');
}
searchClause.append(text.replaceAll("\\+", " ").trim());
if (TREAT_ALL_FIELDS_AS_PHRASE || inKeyword)
{
searchClause.append('"');
}
cqlStack.push(searchClause.toString().trim());
searchClause = null;
}
}
//
// evaluate expression so far if we hit a </triple>
//
if( element.equals( "triple" ) )
{
String rightOperand = ( String ) cqlStack.pop();
String leftOperand = ( String ) cqlStack.pop();
String booleanRelation = ( String ) cqlStack.pop();
cqlStack.push(leftOperand.replaceAll("\\+", " ").trim()
+ translateBooleanRelation(booleanRelation)
+ rightOperand.replaceAll("\\+", " ").trim());
}
textBuffer = null;
}
/**
* Translate a CQL index to the appropriate Sirsi/Muse field name
* @param cqlIndex CQL index name
* @return Sirsi/Muse field name
*/
private String translateIndex(String cqlIndex)
{
String sirsiIndex = ( String ) INDEX_MAP.get(cqlIndex);
if (sirsiIndex == null || sirsiIndex.equals( "" ))
{
log.error("translateIndex(): bad index, using KEYWORD");
sirsiIndex = (String) INDEX_MAP.get("keyword");
}
return sirsiIndex;
}
/**
* Translate a CQL boolean term to the appropriate Sirsi/Muse syntax
* @param booleanRelation CQL boolean term
* @return Sirsi/Muse boolean operation
*/
private String translateBooleanRelation(String booleanRelation)
{
String sirsiBoolean = (String) BOOL_RELATION_MAP.get(booleanRelation);
if (sirsiBoolean == null || sirsiBoolean.equals( "" ))
{
log.error("translateBooleanRelation(): bad boolean relation, using AND" );
sirsiBoolean = (String) BOOL_RELATION_MAP.get("and");
}
return sirsiBoolean;
}
/**
* Main(): test
*/
public static void main(String[] args)
{
CqlParser parser = new CqlParser();
String query;
query = "title=\"My Title\"";
log.debug("CQL: {}", query);
log.debug("Sirsi: {}", parser.doCQL2MetasearchCommand(query));
query = "title=\"\\\"My Title\\\"\"";
log.debug("CQL: {}", query);
log.debug("Sirsi: {}", parser.doCQL2MetasearchCommand(query));
query = "title=\"My Title\" and keyword=\"some keywords\"";
log.debug("CQL: {}", query);
log.debug("Sirsi: {}", parser.doCQL2MetasearchCommand(query));
query = "title=\"My Title\" and keyword=\"some keywords\" and year=\"1999\"";
log.debug("CQL: {}", query);
log.debug("Sirsi: {}", parser.doCQL2MetasearchCommand(query));
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.